Dec 05 01:10:23 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 05 01:10:23 crc restorecon[4584]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Dec 05 01:10:23 crc restorecon[4584]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:23 crc restorecon[4584]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 01:10:23 crc 
restorecon[4584]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 01:10:23 crc restorecon[4584]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 01:10:23 crc restorecon[4584]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 01:10:24 crc 
restorecon[4584]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 01:10:24 crc restorecon[4584]:
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 
01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 01:10:24 crc 
restorecon[4584]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c37,c572 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 
01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]:
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not 
reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 01:10:24 crc restorecon[4584]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 01:10:24 crc restorecon[4584]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
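Note on the relabel pass above: the long run of "not reset as customized by admin" messages is standard restorecon behavior, not a failure. Types such as container_file_t are treated as customizable by the targeted policy, so a default relabel leaves files already carrying them alone. A minimal shell sketch of how this is typically inspected; the directory mirrors the log, the flags are standard restorecon options, and none of this is part of the captured boot sequence:
# Show which SELinux types the policy treats as admin-customizable
# (container_file_t is typically listed on RHEL/Fedora targeted policy).
cat /etc/selinux/targeted/contexts/customizable_types
# Dry-run relabel of the kubelet state directory: -n changes nothing,
# -v reports each file; customizable types are skipped, which matches
# the "not reset as customized by admin" lines above.
restorecon -Rnv /var/lib/kubelet
# -F forces the reset even for customizable types, the very action the
# messages above are declining to take. Use with care on a live node.
restorecon -RFv /var/lib/kubelet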
Dec 05 01:10:24 crc kubenswrapper[4665]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 05 01:10:24 crc kubenswrapper[4665]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Dec 05 01:10:24 crc kubenswrapper[4665]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 05 01:10:24 crc kubenswrapper[4665]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 05 01:10:24 crc kubenswrapper[4665]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Dec 05 01:10:24 crc kubenswrapper[4665]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.770996 4665 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775266 4665 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775284 4665 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775288 4665 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775295 4665 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775299 4665 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775304 4665 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775309 4665 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775313 4665 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775330 4665 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775334 4665 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775338 4665 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775342 4665 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775346 4665 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775351 4665 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775356 4665 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775359 4665 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775364 4665 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775368 4665 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775372 4665 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775376 4665 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775380 4665 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775385 4665 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775389 4665 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775393 4665 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775397 4665 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775401 4665 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775404 4665 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775409 4665 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775413 4665 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775416 4665 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775420 4665 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775423 4665 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775427 4665 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775431 4665 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775434 4665 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775442 4665 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775446 4665 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775450 4665 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775454 4665 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775458 4665 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775461 4665 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775465 4665 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775469 4665 feature_gate.go:330] unrecognized feature gate: Example
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775472 4665 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775476 4665 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775479 4665 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775483 4665 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775487 4665 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775491 4665 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775495 4665 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775498 4665 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775502 4665 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775506 4665 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775510 4665 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775514 4665 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775518 4665 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775523 4665 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775526 4665 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775530 4665 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775534 4665 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775538 4665 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775707 4665 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775713 4665 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775717 4665 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775721 4665 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775725 4665 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775729 4665 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775733 4665 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775736 4665 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775740 4665 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.775744 4665 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775807 4665 flags.go:64] FLAG: --address="0.0.0.0"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775814 4665 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775821 4665 flags.go:64] FLAG: --anonymous-auth="true"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775827 4665 flags.go:64] FLAG: --application-metrics-count-limit="100"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775832 4665 flags.go:64] FLAG: --authentication-token-webhook="false"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775837 4665 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775843 4665 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775848 4665 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775852 4665 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775856 4665 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775861 4665 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775865 4665 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775869 4665 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775874 4665 flags.go:64] FLAG: --cgroup-root=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775878 4665 flags.go:64] FLAG: --cgroups-per-qos="true"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775882 4665 flags.go:64] FLAG: --client-ca-file=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775886 4665 flags.go:64] FLAG: --cloud-config=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775890 4665 flags.go:64] FLAG: --cloud-provider=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775894 4665 flags.go:64] FLAG: --cluster-dns="[]"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775901 4665 flags.go:64] FLAG: --cluster-domain=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775905 4665 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775909 4665 flags.go:64] FLAG: --config-dir=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775913 4665 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775917 4665 flags.go:64] FLAG: --container-log-max-files="5"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775922 4665 flags.go:64] FLAG: --container-log-max-size="10Mi"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775926 4665 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775930 4665 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775935 4665 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775939 4665 flags.go:64] FLAG: --contention-profiling="false"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775944 4665 flags.go:64] FLAG: --cpu-cfs-quota="true"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775948 4665 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775952 4665 flags.go:64] FLAG: --cpu-manager-policy="none"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775956 4665 flags.go:64] FLAG: --cpu-manager-policy-options=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775961 4665 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775965 4665 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775969 4665 flags.go:64] FLAG: --enable-debugging-handlers="true"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775973 4665 flags.go:64] FLAG: --enable-load-reader="false"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775977 4665 flags.go:64] FLAG: --enable-server="true"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775981 4665 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775986 4665 flags.go:64] FLAG: --event-burst="100"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775990 4665 flags.go:64] FLAG: --event-qps="50"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775994 4665 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.775998 4665 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776002 4665 flags.go:64] FLAG: --eviction-hard=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776007 4665 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776013 4665 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776017 4665 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776023 4665 flags.go:64] FLAG: --eviction-soft=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776027 4665 flags.go:64] FLAG: --eviction-soft-grace-period=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776031 4665 flags.go:64] FLAG: --exit-on-lock-contention="false"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776035 4665 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776039 4665 flags.go:64] FLAG: --experimental-mounter-path=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776043 4665 flags.go:64] FLAG: --fail-cgroupv1="false"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776047 4665 flags.go:64] FLAG: --fail-swap-on="true"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776051 4665 flags.go:64] FLAG: --feature-gates=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776061 4665 flags.go:64] FLAG: --file-check-frequency="20s"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776065 4665 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776069 4665 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776073 4665 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776077 4665 flags.go:64] FLAG: --healthz-port="10248"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776081 4665 flags.go:64] FLAG: --help="false"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776085 4665 flags.go:64] FLAG: --hostname-override=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776089 4665 flags.go:64] FLAG: --housekeeping-interval="10s"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776093 4665 flags.go:64] FLAG: --http-check-frequency="20s"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776097 4665 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776101 4665 flags.go:64] FLAG: --image-credential-provider-config=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776105 4665 flags.go:64] FLAG: --image-gc-high-threshold="85"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776109 4665 flags.go:64] FLAG: --image-gc-low-threshold="80"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776113 4665 flags.go:64] FLAG: --image-service-endpoint=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776117 4665 flags.go:64] FLAG: --kernel-memcg-notification="false"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776122 4665 flags.go:64] FLAG: --kube-api-burst="100"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776126 4665 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776130 4665 flags.go:64] FLAG: --kube-api-qps="50"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776134 4665 flags.go:64] FLAG: --kube-reserved=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776138 4665 flags.go:64] FLAG: --kube-reserved-cgroup=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776142 4665 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776147 4665 flags.go:64] FLAG: --kubelet-cgroups=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776151 4665 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776155 4665 flags.go:64] FLAG: --lock-file=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776159 4665 flags.go:64] FLAG: --log-cadvisor-usage="false"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776163 4665 flags.go:64] FLAG: --log-flush-frequency="5s"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776167 4665 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776173 4665 flags.go:64] FLAG: --log-json-split-stream="false"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776177 4665 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776181 4665 flags.go:64] FLAG: --log-text-split-stream="false"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776185 4665 flags.go:64] FLAG: --logging-format="text"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776189 4665 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776194 4665 flags.go:64] FLAG: --make-iptables-util-chains="true"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776197 4665 flags.go:64] FLAG: --manifest-url=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776201 4665 flags.go:64] FLAG: --manifest-url-header=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776206 4665 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776210 4665 flags.go:64] FLAG: --max-open-files="1000000"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776215 4665 flags.go:64] FLAG: --max-pods="110"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776219 4665 flags.go:64] FLAG: --maximum-dead-containers="-1"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776223 4665 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776227 4665 flags.go:64] FLAG: --memory-manager-policy="None"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776231 4665 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776235 4665 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776240 4665 flags.go:64] FLAG: --node-ip="192.168.126.11"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776243 4665 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776252 4665 flags.go:64] FLAG: --node-status-max-images="50"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776256 4665 flags.go:64] FLAG: --node-status-update-frequency="10s"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776260 4665 flags.go:64] FLAG: --oom-score-adj="-999"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776264 4665 flags.go:64] FLAG: --pod-cidr=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776268 4665 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776274 4665 flags.go:64] FLAG: --pod-manifest-path=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776278 4665 flags.go:64] FLAG: --pod-max-pids="-1"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776282 4665 flags.go:64] FLAG: --pods-per-core="0"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776286 4665 flags.go:64] FLAG: --port="10250"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776290 4665 flags.go:64] FLAG: --protect-kernel-defaults="false"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776298 4665 flags.go:64] FLAG: --provider-id=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776303 4665 flags.go:64] FLAG: --qos-reserved=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776307 4665 flags.go:64] FLAG: --read-only-port="10255"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776311 4665 flags.go:64] FLAG: --register-node="true"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776326 4665 flags.go:64] FLAG: --register-schedulable="true"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776330 4665 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776337 4665 flags.go:64] FLAG: --registry-burst="10"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776341 4665 flags.go:64] FLAG: --registry-qps="5"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776345 4665 flags.go:64] FLAG: --reserved-cpus=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776350 4665 flags.go:64] FLAG: --reserved-memory=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776355 4665 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776359 4665 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776363 4665 flags.go:64] FLAG: --rotate-certificates="false"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776367 4665 flags.go:64] FLAG: --rotate-server-certificates="false"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776371 4665 flags.go:64] FLAG: --runonce="false"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776374 4665 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776378 4665 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776383 4665 flags.go:64] FLAG: --seccomp-default="false"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776387 4665 flags.go:64] FLAG: --serialize-image-pulls="true"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776390 4665 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776394 4665 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776398 4665 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776402 4665 flags.go:64] FLAG: --storage-driver-password="root"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776406 4665 flags.go:64] FLAG: --storage-driver-secure="false"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776410 4665 flags.go:64] FLAG: --storage-driver-table="stats"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776414 4665 flags.go:64] FLAG: --storage-driver-user="root"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776418 4665 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776422 4665 flags.go:64] FLAG: --sync-frequency="1m0s"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776426 4665 flags.go:64] FLAG: --system-cgroups=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776430 4665 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776437 4665 flags.go:64] FLAG: --system-reserved-cgroup=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776441 4665 flags.go:64] FLAG: --tls-cert-file=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776445 4665 flags.go:64] FLAG: --tls-cipher-suites="[]"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776450 4665 flags.go:64] FLAG: --tls-min-version=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776454 4665 flags.go:64] FLAG: --tls-private-key-file=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776458 4665 flags.go:64] FLAG: --topology-manager-policy="none"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776462 4665 flags.go:64] FLAG: --topology-manager-policy-options=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776466 4665 flags.go:64] FLAG: --topology-manager-scope="container"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776471 4665 flags.go:64] FLAG: --v="2"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776477 4665 flags.go:64] FLAG: --version="false"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776482 4665 flags.go:64] FLAG: --vmodule=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776487 4665 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776491 4665 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776591 4665 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776596 4665 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776600 4665 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776604 4665 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776607 4665 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776611 4665 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776616 4665 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776620 4665 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776624 4665 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776628 4665 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776632 4665 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776636 4665 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776639 4665 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776643 4665 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776647 4665 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776651 4665 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776655 4665 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776658 4665 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776662 4665 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776666 4665 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776669 4665 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776673 4665 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776677 4665 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776680 4665 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776684 4665 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776688 4665 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776691 4665 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776695 4665 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776698 4665 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776702 4665 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776705 4665 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776709 4665 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776712 4665 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776716 4665 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776719 4665 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776724 4665 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776728 4665 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776732 4665 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776736 4665 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776744 4665 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776750 4665 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776754 4665 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776771 4665 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776774 4665 feature_gate.go:330] unrecognized feature gate: Example
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776778 4665 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776781 4665 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776786 4665 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776791 4665 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776795 4665 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776799 4665 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776803 4665 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776807 4665 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776811 4665 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776814 4665 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776818 4665 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776821 4665 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776825 4665 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776829 4665 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776832 4665 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776836 4665 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776839 4665 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776843 4665 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776846 4665 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776851 4665 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776855 4665 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776858 4665 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776862 4665 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776865 4665 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776869 4665 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776872 4665 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.776878 4665 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.776890 4665 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.782671 4665 server.go:491] "Kubelet version" kubeletVersion="v1.31.5"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.782699 4665 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK=""
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782768 4665 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782778 4665 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782782 4665 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782786 4665 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782791 4665 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782794 4665 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782798 4665 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782801 4665 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782805 4665 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782809 4665 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782812 4665 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782816 4665 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782819 4665 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782823 4665 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782827 4665 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782830 4665 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782834 4665 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782838 4665 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782841 4665 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782845 4665 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782849 4665 feature_gate.go:330] unrecognized feature gate: Example
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782853 4665 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782856 4665 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782860 4665 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782865 4665 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782869 4665 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782873 4665 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782878 4665 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782881 4665 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782885 4665 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782889 4665 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782893 4665 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782897 4665 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782900 4665 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782905 4665 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782909 4665 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782913 4665 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782916 4665 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782919 4665 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782923 4665 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782926 4665 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782930 4665 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782934 4665 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782937 4665 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782941 4665 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782944 4665 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782948 4665 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782951 4665 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782955 4665 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782958 4665 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782962 4665 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782965 4665 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782969 4665 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782972 4665 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782976 4665 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782979 4665 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782984 4665 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782989 4665 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782993 4665 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.782997 4665 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783000 4665 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783004 4665 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783008 4665 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783012 4665 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783015 4665 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783018 4665 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783023 4665 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783027 4665 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783031 4665 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783035 4665 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783039 4665 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.783046 4665 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783167 4665 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783172 4665 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783176 4665 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783179 4665 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783183 4665 feature_gate.go:330] unrecognized feature gate: NewOLM
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783187 4665 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783191 4665 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783195 4665 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783200 4665 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783204 4665 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783209 4665 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783213 4665 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783217 4665 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783221 4665 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783224 4665 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783228 4665 feature_gate.go:330] unrecognized feature gate: Example
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783232 4665 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783235 4665 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783239 4665 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783243 4665 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783248 4665 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783253 4665 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783256 4665 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783260 4665 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783263 4665 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783267 4665 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783271 4665 feature_gate.go:330] unrecognized feature gate: OVNObservability
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783274 4665 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783278 4665 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783281 4665 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783284 4665 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783288 4665 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783295 4665 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783298 4665 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783302 4665 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783306 4665 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783310 4665 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783324 4665 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783329 4665 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783332 4665 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783336 4665 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783339 4665 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783343 4665 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783346 4665 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783350 4665 feature_gate.go:330] unrecognized feature gate: PinnedImages
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783354 4665 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783357 4665 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783361 4665 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783364 4665 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783367 4665 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783371 4665 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783374 4665 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783378 4665 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783383 4665 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783388 4665 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783406 4665 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783410 4665 feature_gate.go:330] unrecognized feature gate: SignatureStores
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783415 4665 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783419 4665 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783423 4665 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783426 4665 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783430 4665 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783433 4665 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783437 4665 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783440 4665 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783444 4665 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783447 4665 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783452 4665 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783456 4665 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783460 4665 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.783464 4665 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.783469 4665 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.783618 4665 server.go:940] "Client rotation is on, will bootstrap in background"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.786095 4665 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.786170 4665 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.787034 4665 server.go:997] "Starting client certificate rotation"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.787053 4665 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.787334 4665 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-11-14 01:32:32.692241917 +0000 UTC
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.787394 4665 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.791602 4665 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 05 01:10:24 crc kubenswrapper[4665]: E1205 01:10:24.792887 4665 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.236:6443: connect: connection refused" logger="UnhandledError"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.793691 4665 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.800362 4665 log.go:25] "Validated CRI v1 runtime API"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.815613 4665 log.go:25] "Validated CRI v1 image API"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.816641 4665 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.819406 4665 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-05-01-03-46-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.819476 4665 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.830961 4665 manager.go:217] Machine: {Timestamp:2025-12-05 01:10:24.829905003 +0000 UTC m=+0.169297332 CPUVendorID:AuthenticAMD NumCores:8 NumPhysicalCores:1 NumSockets:8 CpuFrequency:2799998 MemoryCapacity:25199480832 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:58fe2495-9a84-4b13-8fa7-031f802ab624 BootID:488e7d27-a26c-435c-9dd7-9f8a30d10fc0 Filesystems:[{Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:2519945216 Type:vfs Inodes:615221 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:3076108 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:12599738368 Type:vfs Inodes:3076108 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:5039898624 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:12599742464 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:429496729600 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:30:e2:85 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:30:e2:85 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:d6:ed:a6 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:f1:3f:ab Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:c9:a7:88 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:64:e7:8e Speed:-1 Mtu:1496} {Name:eth10 MacAddress:0e:e2:e9:f9:92:4f Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:aa:c3:a2:5a:20:09 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:25199480832 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.831182 4665 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.831340 4665 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.831716 4665 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.831899 4665 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.831932 4665 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.832311 4665 topology_manager.go:138] "Creating topology manager with none policy"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.832345 4665 container_manager_linux.go:303] "Creating device plugin manager"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.832584 4665 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.832615 4665 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.832891 4665 state_mem.go:36] "Initialized new in-memory state store"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.832988 4665 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.833821 4665 kubelet.go:418] "Attempting to sync node with API server"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.833851 4665 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.833879 4665 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.833894 4665 kubelet.go:324] "Adding apiserver pod source"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.833906 4665 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.835737 4665 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.836373 4665 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.836516 4665 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.236:6443: connect: connection refused
Dec 05 01:10:24 crc kubenswrapper[4665]: E1205 01:10:24.836629 4665 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.236:6443: connect: connection refused" logger="UnhandledError"
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.836791 4665 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.236:6443: connect: connection refused
Dec 05 01:10:24 crc kubenswrapper[4665]: E1205 01:10:24.836847 4665 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.236:6443: connect: connection refused" logger="UnhandledError"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.837590 4665 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.838405 4665 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.838494 4665 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.838512 4665 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.838526 4665 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.838548 4665 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.838560 4665 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.838574 4665 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.838595 4665 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.838610 4665 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.838624 4665 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.838649 4665 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.838662 4665 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.838818 4665 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.839256 4665 server.go:1280] "Started kubelet"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.839482 4665 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.839535 4665 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.236:6443: connect: connection refused
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.839814 4665 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 05 01:10:24 crc systemd[1]: Started Kubernetes Kubelet.
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.842518 4665 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.842587 4665 server.go:460] "Adding debug handlers to kubelet server"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.843797 4665 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.843826 4665 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.844032 4665 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 12:57:50.560269362 +0000 UTC
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.844085 4665 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 1067h47m25.716187327s for next certificate rotation
Dec 05 01:10:24 crc kubenswrapper[4665]: E1205 01:10:24.843666 4665 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.236:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187e2c7e46af584e default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 01:10:24.839211086 +0000 UTC m=+0.178603385,LastTimestamp:2025-12-05 01:10:24.839211086 +0000 UTC m=+0.178603385,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.844236 4665 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.844245 4665 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 05 01:10:24 crc kubenswrapper[4665]: E1205 01:10:24.844289 4665 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.844350 4665 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 05 01:10:24 crc kubenswrapper[4665]: E1205 01:10:24.844723 4665 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.236:6443: connect: connection refused" interval="200ms"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.844934 4665 factory.go:55] Registering systemd factory
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.844951 4665 factory.go:221] Registration of the systemd container factory successfully
Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.845484 4665 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.236:6443: connect: connection refused
Dec 05 01:10:24 crc kubenswrapper[4665]: E1205 01:10:24.845529 4665 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.236:6443: connect: connection refused" logger="UnhandledError"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.845624 4665 factory.go:153] Registering CRI-O factory
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.845636 4665 factory.go:221] Registration of the crio container factory successfully
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.845692 4665 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.845713 4665 factory.go:103] Registering Raw factory
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.845732 4665 manager.go:1196] Started watching for new ooms in manager
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.846386 4665 manager.go:319] Starting recovery of all containers
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.853969 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854023 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854042 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854056 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854071 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854084 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854096 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854111 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854128 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854139 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854152 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854164 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854176 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854189 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854200 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854210 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854244 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854258 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854269 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854304 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854332 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854348 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854361 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854372 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854384 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854396 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854411 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854423 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854434 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854446 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854460 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854472 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854484 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854527 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854539 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854551 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854562 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854573 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854587 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854600 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854613 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854625 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854637 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854649 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854663 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854678 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854690 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854703 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854715 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854729 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854741 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854755 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854777 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854794 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854809 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854823 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854837 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854849 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854862 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854873 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854888 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854902 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854915 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854927 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854940 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854953 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854965 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854979 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.854993 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.855005 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.855018 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.855032 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.855048 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.855060 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.855073 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.855085 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.855126 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.855142 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.855155 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.855171 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.855183 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.855201 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.855214 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.855228 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.855243 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.855262 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.855284 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.855307 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.855336 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.855349 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.855361 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.855377 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856009 4665 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856046 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856065 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856105 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856119 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856138 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856151 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856164 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856176 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856187 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856198 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856209 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856220 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856249 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856263 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856275 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856297 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856312 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856339 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856352 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856364 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856377 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856389 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856399 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856411 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856421 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856434 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856446 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856459 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856472 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856487 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856502 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856517 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856531 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856543 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856555 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856567 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856582 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856598 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856615 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856629 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856642 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856653 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856667 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856679 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856691 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856701 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856714 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856726 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856739 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856753 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856768 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856782 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856802 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod=""
podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856819 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856831 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856843 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856855 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856870 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856883 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856895 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856906 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856919 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856931 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856942 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856954 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856965 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856976 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856987 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.856997 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857007 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857019 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857031 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857042 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857054 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857065 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" 
volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857076 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857087 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857098 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857110 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857122 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857136 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857149 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857161 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857173 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857184 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857196 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" 
volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857210 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857221 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857233 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857244 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857255 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857269 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857287 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857303 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857329 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857341 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857353 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" 
volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857366 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857377 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857389 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857400 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857411 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857425 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857437 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857448 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857460 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857473 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857484 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" 
volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857494 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857504 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857514 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857526 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857538 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857548 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857558 4665 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857567 4665 reconstruct.go:97] "Volume reconstruction finished" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.857575 4665 reconciler.go:26] "Reconciler: start to sync state" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.870888 4665 manager.go:324] Recovery completed Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.883039 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.884749 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.884780 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.884790 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.885451 4665 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.885474 4665 cpu_manager.go:226] 
"Reconciling" reconcilePeriod="10s" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.885500 4665 state_mem.go:36] "Initialized new in-memory state store" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.890973 4665 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.892191 4665 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv6" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.892219 4665 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.892239 4665 kubelet.go:2335] "Starting kubelet main sync loop" Dec 05 01:10:24 crc kubenswrapper[4665]: E1205 01:10:24.892275 4665 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 05 01:10:24 crc kubenswrapper[4665]: W1205 01:10:24.892857 4665 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.236:6443: connect: connection refused Dec 05 01:10:24 crc kubenswrapper[4665]: E1205 01:10:24.892912 4665 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.236:6443: connect: connection refused" logger="UnhandledError" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.942442 4665 policy_none.go:49] "None policy: Start" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.943407 4665 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.943431 4665 state_mem.go:35] "Initializing new in-memory state store" Dec 05 01:10:24 crc kubenswrapper[4665]: E1205 01:10:24.944490 4665 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.984390 4665 manager.go:334] "Starting Device Plugin manager" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.984434 4665 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.984446 4665 server.go:79] "Starting device plugin registration server" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.984792 4665 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.984808 4665 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.985018 4665 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.985100 4665 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.985107 4665 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 05 01:10:24 crc kubenswrapper[4665]: E1205 01:10:24.991742 4665 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node 
\"crc\" not found" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.992923 4665 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.992990 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.993850 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.993879 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.993892 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.994003 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.994562 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.994596 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.994716 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.994741 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.994766 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.994865 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.994974 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.995010 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.995620 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.995637 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.995649 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.995657 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.995692 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.995660 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.995813 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.995640 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.995867 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.995878 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.996043 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.996070 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.998490 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.998535 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.998547 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.998592 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.998609 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.998620 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.998710 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.998844 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 01:10:24 crc kubenswrapper[4665]: I1205 01:10:24.998888 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.000161 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.000213 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.000229 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.000561 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.000612 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.000813 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.000965 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.001114 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.002001 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.002458 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.003390 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:25 crc kubenswrapper[4665]: E1205 01:10:25.045444 4665 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.236:6443: connect: connection refused" interval="400ms" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.059731 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.059769 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.059787 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.059804 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.059818 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:10:25 crc 
kubenswrapper[4665]: I1205 01:10:25.060047 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.060079 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.060123 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.060147 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.060165 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.060208 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.060230 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.060253 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.060333 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.060359 4665 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.085084 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.086578 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.086618 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.086629 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.086655 4665 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 01:10:25 crc kubenswrapper[4665]: E1205 01:10:25.087089 4665 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.236:6443: connect: connection refused" node="crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.161786 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.161826 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.161846 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.161860 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.161875 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.161888 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" 
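The reconciler_common.go lines above and below trace the kubelet volume manager's two-phase mount path for the static control-plane pods: operationExecutor.VerifyControllerAttachedVolume is logged first for each host-path volume, then operationExecutor.MountVolume starts and operation_generator.go reports "MountVolume.SetUp succeeded". For kubernetes.io/host-path volumes there is nothing to attach or format, so SetUp amounts to little more than confirming the directory exists on the node. The interleaved "connection refused" errors are expected at this stage: the kubelet is bootstrapping the very kube-apiserver static pod it is trying to talk to, so node registration and lease creation keep retrying (note the lease retry interval doubling from 400ms to 800ms below). A minimal Go sketch of the host-path SetUp idea — hypothetical names and example paths, not the kubelet's actual operationexecutor API:

    package main

    import (
        "fmt"
        "os"
    )

    // hostPathSetUp mirrors what "MountVolume.SetUp succeeded" means for a
    // kubernetes.io/host-path volume: no attach, no format, just verify the
    // path is present on the node. Illustrative only, not kubelet code.
    func hostPathSetUp(path string) error {
        if _, err := os.Stat(path); err != nil {
            return fmt.Errorf("MountVolume.SetUp failed for %q: %w", path, err)
        }
        return nil
    }

    func main() {
        // Example host paths; the real ones come from the static pod specs
        // (e.g. the var-lib-kubelet and etc-kube volumes logged here).
        for _, p := range []string{"/var/lib/kubelet", "/etc/kubernetes"} {
            if err := hostPathSetUp(p); err != nil {
                fmt.Println(err)
                continue
            }
            fmt.Printf("MountVolume.SetUp succeeded for %s\n", p)
        }
    }

Because host-path volumes skip the attach/detach controller entirely, all of the SetUp successes below complete within the same millisecond burst, before the API server is even reachable.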
Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.161902 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.161918 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.161933 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.161947 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.161963 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.161977 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.161991 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.162005 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.162020 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.162352 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") 
pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.162380 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.162416 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.162415 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.162432 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.162487 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.162510 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.162540 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.162542 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.162394 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.162514 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" 
(UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.162570 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.162579 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.162581 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.162600 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.288039 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.289550 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.289599 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.289612 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.289637 4665 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 01:10:25 crc kubenswrapper[4665]: E1205 01:10:25.290151 4665 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.236:6443: connect: connection refused" node="crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.330813 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.338255 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.356914 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: W1205 01:10:25.377171 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-98b786fdd7ad1b5b4a39fc7a3d97f5264df456e73a3d7b4365a0df964c751ac7 WatchSource:0}: Error finding container 98b786fdd7ad1b5b4a39fc7a3d97f5264df456e73a3d7b4365a0df964c751ac7: Status 404 returned error can't find the container with id 98b786fdd7ad1b5b4a39fc7a3d97f5264df456e73a3d7b4365a0df964c751ac7 Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.378195 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: W1205 01:10:25.378403 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-6b4eb251f7c39eeb0b660540e4701ef89c4bc35f25662d77997d4fcd7ce3a0cf WatchSource:0}: Error finding container 6b4eb251f7c39eeb0b660540e4701ef89c4bc35f25662d77997d4fcd7ce3a0cf: Status 404 returned error can't find the container with id 6b4eb251f7c39eeb0b660540e4701ef89c4bc35f25662d77997d4fcd7ce3a0cf Dec 05 01:10:25 crc kubenswrapper[4665]: W1205 01:10:25.384790 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-b0ffcb3431161ecfc7fc2f051662dd59afb89fd5b781cf31a90f0a7cdea2625c WatchSource:0}: Error finding container b0ffcb3431161ecfc7fc2f051662dd59afb89fd5b781cf31a90f0a7cdea2625c: Status 404 returned error can't find the container with id b0ffcb3431161ecfc7fc2f051662dd59afb89fd5b781cf31a90f0a7cdea2625c Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.385748 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 01:10:25 crc kubenswrapper[4665]: W1205 01:10:25.402061 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-660cd28609c34a02d7567ff088c793a8282dc07b5ec3d97c1491e27d2c19ab58 WatchSource:0}: Error finding container 660cd28609c34a02d7567ff088c793a8282dc07b5ec3d97c1491e27d2c19ab58: Status 404 returned error can't find the container with id 660cd28609c34a02d7567ff088c793a8282dc07b5ec3d97c1491e27d2c19ab58 Dec 05 01:10:25 crc kubenswrapper[4665]: W1205 01:10:25.413467 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-73e97635f0ea627242956be60b746c7c06a5c099f80ae8748a0d7dd4d21615d4 WatchSource:0}: Error finding container 73e97635f0ea627242956be60b746c7c06a5c099f80ae8748a0d7dd4d21615d4: Status 404 returned error can't find the container with id 73e97635f0ea627242956be60b746c7c06a5c099f80ae8748a0d7dd4d21615d4 Dec 05 01:10:25 crc kubenswrapper[4665]: E1205 01:10:25.446588 4665 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.236:6443: connect: connection refused" interval="800ms" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.691003 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.692115 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.692146 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.692155 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.692179 4665 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 01:10:25 crc kubenswrapper[4665]: E1205 01:10:25.692630 4665 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.236:6443: connect: connection refused" node="crc" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.840504 4665 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.236:6443: connect: connection refused Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.897586 4665 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="714dad76d51c3355cc0a4de1749bf3e960d0eefd2bcb82cf264981941c1fc754" exitCode=0 Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.897667 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"714dad76d51c3355cc0a4de1749bf3e960d0eefd2bcb82cf264981941c1fc754"} Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.897763 4665 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"6b4eb251f7c39eeb0b660540e4701ef89c4bc35f25662d77997d4fcd7ce3a0cf"} Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.897837 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.902316 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.902375 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.902387 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.905949 4665 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="9c2504ed26a239c4f11d977aad9a4cc0ef5c9a7eefee20790a21dd013288a06a" exitCode=0 Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.906034 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"9c2504ed26a239c4f11d977aad9a4cc0ef5c9a7eefee20790a21dd013288a06a"} Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.906087 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"73e97635f0ea627242956be60b746c7c06a5c099f80ae8748a0d7dd4d21615d4"} Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.906178 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.907732 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.907805 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.907819 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.909751 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521"} Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.909874 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"660cd28609c34a02d7567ff088c793a8282dc07b5ec3d97c1491e27d2c19ab58"} Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.911676 4665 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff" exitCode=0 Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.911750 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff"} Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.911772 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b0ffcb3431161ecfc7fc2f051662dd59afb89fd5b781cf31a90f0a7cdea2625c"} Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.911902 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.912806 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.912831 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.912839 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.913117 4665 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e" exitCode=0 Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.913143 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e"} Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.913175 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"98b786fdd7ad1b5b4a39fc7a3d97f5264df456e73a3d7b4365a0df964c751ac7"} Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.913264 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.913954 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.914587 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.914758 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.914836 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.915436 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.915457 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:25 crc kubenswrapper[4665]: I1205 01:10:25.915466 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:25 crc kubenswrapper[4665]: W1205 01:10:25.920544 4665 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: 
Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.236:6443: connect: connection refused Dec 05 01:10:25 crc kubenswrapper[4665]: E1205 01:10:25.920594 4665 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.236:6443: connect: connection refused" logger="UnhandledError" Dec 05 01:10:25 crc kubenswrapper[4665]: W1205 01:10:25.976499 4665 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.236:6443: connect: connection refused Dec 05 01:10:25 crc kubenswrapper[4665]: E1205 01:10:25.976577 4665 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.236:6443: connect: connection refused" logger="UnhandledError" Dec 05 01:10:26 crc kubenswrapper[4665]: W1205 01:10:26.220448 4665 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.236:6443: connect: connection refused Dec 05 01:10:26 crc kubenswrapper[4665]: E1205 01:10:26.220512 4665 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.236:6443: connect: connection refused" logger="UnhandledError" Dec 05 01:10:26 crc kubenswrapper[4665]: E1205 01:10:26.248149 4665 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.236:6443: connect: connection refused" interval="1.6s" Dec 05 01:10:26 crc kubenswrapper[4665]: W1205 01:10:26.444305 4665 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.236:6443: connect: connection refused Dec 05 01:10:26 crc kubenswrapper[4665]: E1205 01:10:26.444435 4665 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.236:6443: connect: connection refused" logger="UnhandledError" Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.493247 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.494344 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.494378 4665 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.494386 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.494407 4665 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 01:10:26 crc kubenswrapper[4665]: E1205 01:10:26.494784 4665 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.236:6443: connect: connection refused" node="crc" Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.900155 4665 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.917652 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"02e1517bd35ee9ac7c95a0aa67ccd5d85fbcb63c1cd17c3acd10a493649d4629"} Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.917712 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"b526f85582e7a9dd8a7cd3ecbcf11caca2a83cd5b9a9e3f70cc3c7ee9680e7d0"} Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.917724 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"0ba79066d653d7bb787b2e25c4814d52630cb5b93d5a64706a7ed4438db92204"} Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.917845 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.919152 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.919174 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.919181 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.920262 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51"} Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.920309 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f"} Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.920323 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e"} Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.920381 4665 kubelet_node_status.go:401] 
"Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.920948 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.920967 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.920974 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.924139 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241"} Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.924161 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f"} Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.924171 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad"} Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.924181 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba"} Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.924190 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19"} Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.924266 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.925002 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.925022 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.925029 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.929497 4665 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84" exitCode=0 Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.929579 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84"} Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.929731 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" 
Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.930966 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.931023 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.931036 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.932415 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"a93e520899a2846dc0cefe3c7202139908be046101bd7b7b57c4495aa70f3664"} Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.932510 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.933148 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.933165 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:26 crc kubenswrapper[4665]: I1205 01:10:26.933173 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:27 crc kubenswrapper[4665]: I1205 01:10:27.356181 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:10:27 crc kubenswrapper[4665]: I1205 01:10:27.560514 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:10:27 crc kubenswrapper[4665]: I1205 01:10:27.936312 4665 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e" exitCode=0 Dec 05 01:10:27 crc kubenswrapper[4665]: I1205 01:10:27.936383 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e"} Dec 05 01:10:27 crc kubenswrapper[4665]: I1205 01:10:27.936447 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:27 crc kubenswrapper[4665]: I1205 01:10:27.936447 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:27 crc kubenswrapper[4665]: I1205 01:10:27.936592 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:27 crc kubenswrapper[4665]: I1205 01:10:27.940532 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:27 crc kubenswrapper[4665]: I1205 01:10:27.940567 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:27 crc kubenswrapper[4665]: I1205 01:10:27.940581 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:27 crc kubenswrapper[4665]: I1205 01:10:27.940651 4665 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:27 crc kubenswrapper[4665]: I1205 01:10:27.940671 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:27 crc kubenswrapper[4665]: I1205 01:10:27.940680 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:27 crc kubenswrapper[4665]: I1205 01:10:27.940546 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:27 crc kubenswrapper[4665]: I1205 01:10:27.940714 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:27 crc kubenswrapper[4665]: I1205 01:10:27.940722 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:28 crc kubenswrapper[4665]: I1205 01:10:28.095760 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:28 crc kubenswrapper[4665]: I1205 01:10:28.096915 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:28 crc kubenswrapper[4665]: I1205 01:10:28.096952 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:28 crc kubenswrapper[4665]: I1205 01:10:28.096963 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:28 crc kubenswrapper[4665]: I1205 01:10:28.096992 4665 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 01:10:28 crc kubenswrapper[4665]: I1205 01:10:28.320492 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:10:28 crc kubenswrapper[4665]: I1205 01:10:28.740171 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 01:10:28 crc kubenswrapper[4665]: I1205 01:10:28.943020 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc"} Dec 05 01:10:28 crc kubenswrapper[4665]: I1205 01:10:28.943056 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89"} Dec 05 01:10:28 crc kubenswrapper[4665]: I1205 01:10:28.943068 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca"} Dec 05 01:10:28 crc kubenswrapper[4665]: I1205 01:10:28.943078 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31"} Dec 05 01:10:28 crc kubenswrapper[4665]: I1205 01:10:28.943234 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:28 crc kubenswrapper[4665]: I1205 01:10:28.943382 4665 
kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:28 crc kubenswrapper[4665]: I1205 01:10:28.944536 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:28 crc kubenswrapper[4665]: I1205 01:10:28.944570 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:28 crc kubenswrapper[4665]: I1205 01:10:28.944587 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:28 crc kubenswrapper[4665]: I1205 01:10:28.944820 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:28 crc kubenswrapper[4665]: I1205 01:10:28.944867 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:28 crc kubenswrapper[4665]: I1205 01:10:28.944878 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:29 crc kubenswrapper[4665]: I1205 01:10:29.951128 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:29 crc kubenswrapper[4665]: I1205 01:10:29.951491 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"cb48e509a210c6d542a95c4fbec75a380c293c303953c8d140ff913049da0fea"} Dec 05 01:10:29 crc kubenswrapper[4665]: I1205 01:10:29.951555 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:29 crc kubenswrapper[4665]: I1205 01:10:29.952609 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:29 crc kubenswrapper[4665]: I1205 01:10:29.952655 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:29 crc kubenswrapper[4665]: I1205 01:10:29.952609 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:29 crc kubenswrapper[4665]: I1205 01:10:29.952694 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:29 crc kubenswrapper[4665]: I1205 01:10:29.952712 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:29 crc kubenswrapper[4665]: I1205 01:10:29.952673 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:30 crc kubenswrapper[4665]: I1205 01:10:30.202983 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 05 01:10:30 crc kubenswrapper[4665]: I1205 01:10:30.350889 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 01:10:30 crc kubenswrapper[4665]: I1205 01:10:30.351149 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:30 crc kubenswrapper[4665]: I1205 01:10:30.352673 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:30 crc kubenswrapper[4665]: I1205 
01:10:30.352718 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:30 crc kubenswrapper[4665]: I1205 01:10:30.352737 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:30 crc kubenswrapper[4665]: I1205 01:10:30.953577 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:30 crc kubenswrapper[4665]: I1205 01:10:30.955243 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:30 crc kubenswrapper[4665]: I1205 01:10:30.955331 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:30 crc kubenswrapper[4665]: I1205 01:10:30.955349 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:31 crc kubenswrapper[4665]: I1205 01:10:31.165976 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 01:10:31 crc kubenswrapper[4665]: I1205 01:10:31.166245 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:31 crc kubenswrapper[4665]: I1205 01:10:31.167626 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:31 crc kubenswrapper[4665]: I1205 01:10:31.167653 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:31 crc kubenswrapper[4665]: I1205 01:10:31.167669 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:31 crc kubenswrapper[4665]: I1205 01:10:31.173747 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 01:10:31 crc kubenswrapper[4665]: I1205 01:10:31.957807 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:31 crc kubenswrapper[4665]: I1205 01:10:31.958218 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:31 crc kubenswrapper[4665]: I1205 01:10:31.958614 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 01:10:31 crc kubenswrapper[4665]: I1205 01:10:31.959795 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:31 crc kubenswrapper[4665]: I1205 01:10:31.959847 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:31 crc kubenswrapper[4665]: I1205 01:10:31.959865 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:31 crc kubenswrapper[4665]: I1205 01:10:31.960116 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:31 crc kubenswrapper[4665]: I1205 01:10:31.960154 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:31 crc kubenswrapper[4665]: I1205 01:10:31.960171 4665 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:32 crc kubenswrapper[4665]: I1205 01:10:32.961647 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:32 crc kubenswrapper[4665]: I1205 01:10:32.963071 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:32 crc kubenswrapper[4665]: I1205 01:10:32.963136 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:32 crc kubenswrapper[4665]: I1205 01:10:32.963156 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:33 crc kubenswrapper[4665]: I1205 01:10:33.351457 4665 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 01:10:33 crc kubenswrapper[4665]: I1205 01:10:33.351574 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 05 01:10:34 crc kubenswrapper[4665]: E1205 01:10:34.992472 4665 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 05 01:10:35 crc kubenswrapper[4665]: I1205 01:10:35.475259 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 01:10:35 crc kubenswrapper[4665]: I1205 01:10:35.475714 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:35 crc kubenswrapper[4665]: I1205 01:10:35.477018 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:35 crc kubenswrapper[4665]: I1205 01:10:35.477089 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:35 crc kubenswrapper[4665]: I1205 01:10:35.477108 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:36 crc kubenswrapper[4665]: I1205 01:10:36.237814 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 05 01:10:36 crc kubenswrapper[4665]: I1205 01:10:36.238011 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:36 crc kubenswrapper[4665]: I1205 01:10:36.239358 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:36 crc kubenswrapper[4665]: I1205 01:10:36.239401 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:36 crc kubenswrapper[4665]: I1205 01:10:36.239412 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Dec 05 01:10:36 crc kubenswrapper[4665]: I1205 01:10:36.726613 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 01:10:36 crc kubenswrapper[4665]: I1205 01:10:36.726717 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:36 crc kubenswrapper[4665]: I1205 01:10:36.727823 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:36 crc kubenswrapper[4665]: I1205 01:10:36.727857 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:36 crc kubenswrapper[4665]: I1205 01:10:36.727867 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:36 crc kubenswrapper[4665]: I1205 01:10:36.841411 4665 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Dec 05 01:10:36 crc kubenswrapper[4665]: E1205 01:10:36.901997 4665 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": net/http: TLS handshake timeout" logger="UnhandledError" Dec 05 01:10:37 crc kubenswrapper[4665]: I1205 01:10:37.356248 4665 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="Get \"https://192.168.126.11:6443/livez\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 01:10:37 crc kubenswrapper[4665]: I1205 01:10:37.356327 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/livez\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 05 01:10:37 crc kubenswrapper[4665]: I1205 01:10:37.405441 4665 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 05 01:10:37 crc kubenswrapper[4665]: I1205 01:10:37.405525 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 05 01:10:41 crc kubenswrapper[4665]: I1205 01:10:41.279725 4665 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Dec 05 01:10:41 crc kubenswrapper[4665]: I1205 01:10:41.295809 4665 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from 
k8s.io/client-go/tools/watch/informerwatcher.go:146 Dec 05 01:10:41 crc kubenswrapper[4665]: I1205 01:10:41.760019 4665 csr.go:261] certificate signing request csr-256bv is approved, waiting to be issued Dec 05 01:10:41 crc kubenswrapper[4665]: I1205 01:10:41.770882 4665 csr.go:257] certificate signing request csr-256bv is issued Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.366427 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.366597 4665 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.367863 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.367903 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.367913 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.371079 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:10:42 crc kubenswrapper[4665]: E1205 01:10:42.391270 4665 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="3.2s" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.392416 4665 trace.go:236] Trace[1354743594]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 01:10:28.011) (total time: 14380ms): Dec 05 01:10:42 crc kubenswrapper[4665]: Trace[1354743594]: ---"Objects listed" error: 14380ms (01:10:42.392) Dec 05 01:10:42 crc kubenswrapper[4665]: Trace[1354743594]: [14.380576464s] [14.380576464s] END Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.392441 4665 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.397985 4665 trace.go:236] Trace[1709956461]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 01:10:29.187) (total time: 13210ms): Dec 05 01:10:42 crc kubenswrapper[4665]: Trace[1709956461]: ---"Objects listed" error: 13210ms (01:10:42.397) Dec 05 01:10:42 crc kubenswrapper[4665]: Trace[1709956461]: [13.210601537s] [13.210601537s] END Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.398018 4665 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.398365 4665 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.398496 4665 trace.go:236] Trace[963375044]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 01:10:28.604) (total time: 13794ms): Dec 05 01:10:42 crc kubenswrapper[4665]: Trace[963375044]: ---"Objects listed" error: 13794ms (01:10:42.398) Dec 05 01:10:42 crc kubenswrapper[4665]: Trace[963375044]: [13.794255322s] [13.794255322s] END Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.398518 4665 reflector.go:368] Caches populated for 
*v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.398784 4665 trace.go:236] Trace[1364229989]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 01:10:28.812) (total time: 13586ms): Dec 05 01:10:42 crc kubenswrapper[4665]: Trace[1364229989]: ---"Objects listed" error: 13586ms (01:10:42.398) Dec 05 01:10:42 crc kubenswrapper[4665]: Trace[1364229989]: [13.586665897s] [13.586665897s] END Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.398795 4665 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 05 01:10:42 crc kubenswrapper[4665]: E1205 01:10:42.399830 4665 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.577060 4665 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:59662->192.168.126.11:17697: read: connection reset by peer" start-of-body= Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.577134 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:59662->192.168.126.11:17697: read: connection reset by peer" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.577538 4665 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.577599 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.772488 4665 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-12-05 01:05:41 +0000 UTC, rotation deadline is 2026-09-12 22:13:12.260263789 +0000 UTC Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.772537 4665 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 6765h2m29.487730297s for next certificate rotation Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.844630 4665 apiserver.go:52] "Watching apiserver" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.850486 4665 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.850753 4665 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c"] Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.851046 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.851088 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.851138 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.851154 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:10:42 crc kubenswrapper[4665]: E1205 01:10:42.851199 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.851252 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 01:10:42 crc kubenswrapper[4665]: E1205 01:10:42.851576 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.851687 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:10:42 crc kubenswrapper[4665]: E1205 01:10:42.851730 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.853197 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.853228 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.854543 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.854777 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.855010 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.855522 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.861908 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.862108 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.862628 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.882740 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.895800 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.905050 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.918508 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.945341 4665 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.946651 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.966941 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.977754 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.986702 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.988858 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.990542 4665 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241" exitCode=255
Dec 05 01:10:42 crc kubenswrapper[4665]: I1205 01:10:42.990573 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241"}
Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.001273 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.001690 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") "
Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.001736 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.001765 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") "
Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.001790 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.001834 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.001857 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002043 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002077 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002048 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002190 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002200 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002239 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002268 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002289 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002330 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002352 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
\"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002375 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002401 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002423 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002443 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002465 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002485 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002507 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002510 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002529 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002551 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002571 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002591 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002612 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002646 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002647 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002694 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002717 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002779 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002828 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002996 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.002993 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003053 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003083 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003134 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003127 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003251 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003274 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003346 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003405 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003459 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003477 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003525 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003556 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003561 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003566 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003589 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003607 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003624 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003644 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003660 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003674 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003687 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003691 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003721 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003741 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003759 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003776 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003791 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003831 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003866 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003887 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003903 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003918 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003934 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003950 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003966 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003982 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.003983 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004019 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004018 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004034 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004043 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004054 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004070 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004085 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004103 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004110 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004119 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004134 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004150 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004167 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004183 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004198 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004213 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004224 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004221 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004228 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004312 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004344 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004376 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004402 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004419 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004427 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004451 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004476 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004497 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004520 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004545 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004571 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004584 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004604 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004632 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004644 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004665 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004692 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004708 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004718 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004743 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004758 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004768 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004814 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004872 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004905 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004929 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004952 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004978 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005005 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005028 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005051 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: 
\"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005075 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005098 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005121 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005142 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005166 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005189 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005211 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005234 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005259 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005284 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005328 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005351 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005376 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005400 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005423 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005444 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005471 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005491 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005514 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005542 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005572 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005594 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005617 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005639 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005660 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005685 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005797 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.006557 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004820 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.004896 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.006809 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005001 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005011 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005168 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005169 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005258 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005362 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). 
InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005373 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005380 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005481 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005535 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005604 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005620 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005743 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.005809 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:10:43.505794069 +0000 UTC m=+18.845186358 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.005984 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.006023 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.006029 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.006079 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.006119 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.006149 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.006220 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.006239 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.006421 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.006901 4665 scope.go:117] "RemoveContainer" containerID="0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.006585 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.006606 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.006784 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.006971 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.007026 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.007263 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.007364 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.007967 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.007999 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.008153 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.008474 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.008543 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.008561 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.008903 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009038 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009184 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009080 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009277 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009336 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009342 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009578 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009640 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009670 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009689 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009714 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009732 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009749 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009766 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009783 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009805 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009839 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009859 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009875 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009892 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009909 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009927 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009944 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009959 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009974 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.009990 4665 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010006 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010021 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010036 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010053 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010069 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010088 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010104 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010119 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010138 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010160 4665 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010175 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010190 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010206 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010221 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010236 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010251 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010268 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010284 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010331 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 01:10:43 crc 
kubenswrapper[4665]: I1205 01:10:43.010350 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010374 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010390 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010405 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010421 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010437 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010452 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010468 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010484 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010501 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 
05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010516 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010532 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010547 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010562 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010579 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010594 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010611 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010627 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010645 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010660 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod 
\"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010675 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010690 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010707 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010723 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010738 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010754 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010769 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010785 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010807 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010827 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010844 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010867 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010884 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010899 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010914 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010930 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010947 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010980 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.010995 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011011 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011033 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011066 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011086 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011105 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011121 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011137 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011157 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011175 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011200 4665 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011218 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011234 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011250 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011267 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011285 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011318 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011360 4665 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011372 4665 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011381 4665 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: 
\"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011390 4665 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011399 4665 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011408 4665 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011418 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011428 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011437 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011446 4665 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011457 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011637 4665 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011649 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011659 4665 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011672 4665 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011681 4665 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011700 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011710 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011719 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011728 4665 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011737 4665 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011747 4665 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011756 4665 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011765 4665 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011775 4665 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011785 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011797 4665 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011810 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011824 4665 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011834 4665 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011843 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011854 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011864 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011873 4665 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011884 4665 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.011897 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.012488 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.012522 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.019526 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.019733 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.019916 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.020216 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.020415 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.020712 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.020794 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.020966 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.021341 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.012626 4665 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.021409 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.022913 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.023217 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.023304 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.023400 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.023981 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.024362 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.024378 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.020787 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.024935 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.025381 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.025699 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.025927 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.026106 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.026292 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.026721 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.026906 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.027399 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.027741 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.028096 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.028285 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). 
InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.028607 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.029656 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.030180 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.017000 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.014219 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.018100 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.018211 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.018582 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). 
InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.018626 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.018887 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.019039 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.019446 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.030527 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.027786 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.032620 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.033592 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.036711 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.037526 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.037814 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.040071 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.039931 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.040720 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.041083 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.041753 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.041757 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.042192 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.042208 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.042351 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.042575 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.043146 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.043147 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.042760 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.043703 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.043742 4665 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.044854 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 01:10:43.544839479 +0000 UTC m=+18.884231768 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.044967 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.045192 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.044065 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.044170 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.042749 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.046777 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.045692 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.047992 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.048599 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.044598 4665 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.046580 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.051590 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.059681 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.062512 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.062544 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.062560 4665 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.062662 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.062674 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.062683 4665 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.063771 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.063992 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.066470 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.066645 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 01:10:43.566623276 +0000 UTC m=+18.906015575 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.070996 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.066736 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.066768 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.071096 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 01:10:43.571056005 +0000 UTC m=+18.910448304 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.071109 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 01:10:43.571102996 +0000 UTC m=+18.910495295 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.071396 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.071699 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.072083 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.072203 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.072423 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.073086 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.073378 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
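
[Note on the teardown burst] Most of this window is the volume manager tearing down volumes for pods removed while the kubelet was down, and the entries are uniform enough to tally mechanically. A small hypothetical analyzer (not part of any tooling referenced here) that reads a kubelet.log on stdin and counts TearDown successes per volume plugin:

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

func main() {
	// Matches: UnmountVolume.TearDown succeeded for volume "kubernetes.io/<plugin>/<uid>-<name>"
	re := regexp.MustCompile(`UnmountVolume\.TearDown succeeded for volume "(kubernetes\.io/[a-z-]+)/`)
	counts := map[string]int{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // journal lines in this capture run very long
	for sc.Scan() {
		if m := re.FindStringSubmatch(sc.Text()); m != nil {
			counts[m[1]]++
		}
	}
	for plugin, n := range counts {
		fmt.Printf("%-28s %d\n", plugin, n)
	}
}

Run as "go run tally.go < kubelet.log"; for this window the output is dominated by kubernetes.io/secret, kubernetes.io/configmap, kubernetes.io/projected, and kubernetes.io/empty-dir teardowns.
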
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.075675 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.075980 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.076011 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.076242 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.076372 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.076536 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.076867 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.076873 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.077145 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.077162 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.077355 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.077444 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.078414 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.081386 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.084345 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). 
InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.084900 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.085339 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.085500 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.085618 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.085857 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.085935 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.085959 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.085945 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.086134 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.086288 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.086730 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.086741 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.086787 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.087150 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.088931 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.093956 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.094583 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.105033 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.112883 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.112934 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.112978 4665 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.112988 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.112998 4665 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113007 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113016 4665 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113024 4665 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113033 4665 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 05 
01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113042 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113051 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113059 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113069 4665 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113077 4665 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113087 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113125 4665 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113135 4665 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113145 4665 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113154 4665 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113164 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113173 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113182 4665 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node 
\"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113191 4665 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113200 4665 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113209 4665 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113219 4665 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113228 4665 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113236 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113245 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113254 4665 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113263 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113273 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113282 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113303 4665 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113313 4665 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" 
Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113322 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113331 4665 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113340 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113350 4665 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113359 4665 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113367 4665 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113375 4665 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113383 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113391 4665 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113399 4665 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113407 4665 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113415 4665 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113426 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 
01:10:43.113434 4665 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113443 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113452 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113460 4665 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113469 4665 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113478 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113486 4665 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113494 4665 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113504 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113513 4665 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113521 4665 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113529 4665 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113537 4665 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 05 
01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113545 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113554 4665 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113562 4665 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113571 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113579 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113588 4665 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113596 4665 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113604 4665 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113611 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113621 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113629 4665 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113637 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113645 4665 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113653 4665 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113661 4665 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113669 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113678 4665 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113686 4665 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113694 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113704 4665 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113712 4665 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113720 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113727 4665 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113735 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113721 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113743 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113847 4665 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113850 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113857 4665 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113866 4665 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node 
\"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113875 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113881 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113885 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113900 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113909 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113917 4665 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113926 4665 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113935 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113943 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113951 4665 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113959 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113967 4665 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113975 4665 reconciler_common.go:293] "Volume detached 
for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113983 4665 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.113992 4665 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114005 4665 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114014 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114021 4665 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114029 4665 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114036 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114044 4665 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114052 4665 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114060 4665 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114068 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114076 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114084 4665 reconciler_common.go:293] "Volume detached for volume 
\"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114091 4665 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114099 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114108 4665 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114115 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114123 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114130 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114138 4665 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114146 4665 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114155 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114163 4665 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114171 4665 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114179 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114187 4665 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114195 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114202 4665 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114211 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114219 4665 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114227 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114234 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114242 4665 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114249 4665 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114258 4665 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114266 4665 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114274 4665 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114282 4665 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114314 4665 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" 
(UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114323 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114331 4665 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114339 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114347 4665 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114355 4665 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114363 4665 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114371 4665 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114380 4665 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114389 4665 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114398 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114406 4665 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114415 4665 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114424 4665 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114432 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114441 4665 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114449 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114456 4665 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114465 4665 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114473 4665 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.114481 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.126672 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.136270 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.164043 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.171511 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.179314 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 01:10:43 crc kubenswrapper[4665]: W1205 01:10:43.193182 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-f5ff61bdb0ec39f7b00749b63bc6f3b95cad6d8b97f67ce03ffd5df1a92d0577 WatchSource:0}: Error finding container f5ff61bdb0ec39f7b00749b63bc6f3b95cad6d8b97f67ce03ffd5df1a92d0577: Status 404 returned error can't find the container with id f5ff61bdb0ec39f7b00749b63bc6f3b95cad6d8b97f67ce03ffd5df1a92d0577 Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.352483 4665 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.352923 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.520309 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.520387 4665 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:10:44.520368435 +0000 UTC m=+19.859760734 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.620758 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.620801 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.620828 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.620852 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.620896 4665 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.620965 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 01:10:44.620945978 +0000 UTC m=+19.960338287 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.620972 4665 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.621027 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 01:10:44.621011109 +0000 UTC m=+19.960403458 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.621062 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.621077 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.621089 4665 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.621100 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.621113 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.621117 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 01:10:44.621108663 +0000 UTC m=+19.960501042 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.621126 4665 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:10:43 crc kubenswrapper[4665]: E1205 01:10:43.621156 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 01:10:44.621146494 +0000 UTC m=+19.960538873 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.661915 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-cps4h"] Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.662264 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-cps4h" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.664756 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.664764 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.664756 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.665995 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-rgbtc"] Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.666259 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.668527 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.669252 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.669657 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.671016 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.671026 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.698953 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05
T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:43Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.721367 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14-mcd-auth-proxy-config\") pod \"machine-config-daemon-rgbtc\" (UID: \"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\") " pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.721402 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/91c7cec2-2aab-4cab-a055-c7994cb11d17-hosts-file\") pod \"node-resolver-cps4h\" (UID: \"91c7cec2-2aab-4cab-a055-c7994cb11d17\") " pod="openshift-dns/node-resolver-cps4h" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.721440 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rblk6\" (UniqueName: 
\"kubernetes.io/projected/dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14-kube-api-access-rblk6\") pod \"machine-config-daemon-rgbtc\" (UID: \"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\") " pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.721481 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14-rootfs\") pod \"machine-config-daemon-rgbtc\" (UID: \"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\") " pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.721496 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtwqt\" (UniqueName: \"kubernetes.io/projected/91c7cec2-2aab-4cab-a055-c7994cb11d17-kube-api-access-mtwqt\") pod \"node-resolver-cps4h\" (UID: \"91c7cec2-2aab-4cab-a055-c7994cb11d17\") " pod="openshift-dns/node-resolver-cps4h" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.721512 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14-proxy-tls\") pod \"machine-config-daemon-rgbtc\" (UID: \"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\") " pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.722156 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:43Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.737925 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:43Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.750127 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:43Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.780898 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:43Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.807972 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:43Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.821822 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtwqt\" (UniqueName: \"kubernetes.io/projected/91c7cec2-2aab-4cab-a055-c7994cb11d17-kube-api-access-mtwqt\") pod \"node-resolver-cps4h\" (UID: \"91c7cec2-2aab-4cab-a055-c7994cb11d17\") " pod="openshift-dns/node-resolver-cps4h" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.821867 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14-proxy-tls\") pod \"machine-config-daemon-rgbtc\" (UID: \"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\") " pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.821888 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14-mcd-auth-proxy-config\") pod \"machine-config-daemon-rgbtc\" (UID: \"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\") " pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.821909 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/91c7cec2-2aab-4cab-a055-c7994cb11d17-hosts-file\") pod \"node-resolver-cps4h\" (UID: \"91c7cec2-2aab-4cab-a055-c7994cb11d17\") " pod="openshift-dns/node-resolver-cps4h" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.821945 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rblk6\" (UniqueName: \"kubernetes.io/projected/dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14-kube-api-access-rblk6\") pod \"machine-config-daemon-rgbtc\" (UID: \"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\") " pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.821980 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14-rootfs\") pod \"machine-config-daemon-rgbtc\" (UID: \"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\") " pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.822022 4665 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14-rootfs\") pod \"machine-config-daemon-rgbtc\" (UID: \"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\") " pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.822076 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/91c7cec2-2aab-4cab-a055-c7994cb11d17-hosts-file\") pod \"node-resolver-cps4h\" (UID: \"91c7cec2-2aab-4cab-a055-c7994cb11d17\") " pod="openshift-dns/node-resolver-cps4h" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.822575 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14-mcd-auth-proxy-config\") pod \"machine-config-daemon-rgbtc\" (UID: \"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\") " pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.825508 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:43Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.826092 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14-proxy-tls\") pod \"machine-config-daemon-rgbtc\" (UID: \"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\") " pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.835649 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtwqt\" (UniqueName: \"kubernetes.io/projected/91c7cec2-2aab-4cab-a055-c7994cb11d17-kube-api-access-mtwqt\") pod \"node-resolver-cps4h\" (UID: \"91c7cec2-2aab-4cab-a055-c7994cb11d17\") " pod="openshift-dns/node-resolver-cps4h" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.837631 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:43Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.837914 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rblk6\" (UniqueName: \"kubernetes.io/projected/dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14-kube-api-access-rblk6\") pod \"machine-config-daemon-rgbtc\" (UID: \"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\") " pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.846175 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:43Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.855093 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:43Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.865273 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:43Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.878188 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05
T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:43Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.891220 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:43Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.904139 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:43Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.919220 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:43Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.931447 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:43Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.945286 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:43Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.973431 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-cps4h" Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.979405 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 01:10:43 crc kubenswrapper[4665]: W1205 01:10:43.983495 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod91c7cec2_2aab_4cab_a055_c7994cb11d17.slice/crio-4f5fcfb025aa97a0e0f6442c36c94910d696140c4f85ee9a0c3decad8764f350 WatchSource:0}: Error finding container 4f5fcfb025aa97a0e0f6442c36c94910d696140c4f85ee9a0c3decad8764f350: Status 404 returned error can't find the container with id 4f5fcfb025aa97a0e0f6442c36c94910d696140c4f85ee9a0c3decad8764f350 Dec 05 01:10:43 crc kubenswrapper[4665]: W1205 01:10:43.990752 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddbc5a2ac_b7ae_4f75_bdec_f4f0f4948a14.slice/crio-1f2936bf3cb1fc6d5e18abe8fa95c45e0b1deefe885b664b490061cc7055bce5 WatchSource:0}: Error finding container 1f2936bf3cb1fc6d5e18abe8fa95c45e0b1deefe885b664b490061cc7055bce5: Status 404 returned error can't find the container with id 1f2936bf3cb1fc6d5e18abe8fa95c45e0b1deefe885b664b490061cc7055bce5 Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.992835 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-cps4h" event={"ID":"91c7cec2-2aab-4cab-a055-c7994cb11d17","Type":"ContainerStarted","Data":"4f5fcfb025aa97a0e0f6442c36c94910d696140c4f85ee9a0c3decad8764f350"} Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.993502 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"f5ff61bdb0ec39f7b00749b63bc6f3b95cad6d8b97f67ce03ffd5df1a92d0577"} Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.995479 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f"} Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.995500 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46"} Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.995510 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"26a46d509bb0adc5d79518a983854c0677831e3f34b4adf4d822b698a870e363"} Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.997195 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" 
event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53"} Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.997323 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"c94d75e52381f9517163d4466105f0a5f841e3aca17d26d6c5be2db140e75be9"} Dec 05 01:10:43 crc kubenswrapper[4665]: I1205 01:10:43.999719 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.001307 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420"} Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.001532 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.013146 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.023649 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.045035 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.066741 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.066893 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-jl867"] Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.067477 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-j22m9"] Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.067648 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.067797 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-jl867" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.069674 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.069778 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.069781 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.069843 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.069906 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.069935 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.069979 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.082384 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.113660 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05
T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.123587 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.123919 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-host-run-netns\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.123947 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-etc-kubernetes\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.123964 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3391189b-95c4-4746-8aea-1d3be0b4ae1a-tuning-conf-dir\") pod \"multus-additional-cni-plugins-jl867\" (UID: \"3391189b-95c4-4746-8aea-1d3be0b4ae1a\") " pod="openshift-multus/multus-additional-cni-plugins-jl867" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.123990 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/be34b4a6-0156-4e21-bae6-12af18583b0d-multus-daemon-config\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.124006 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-cnibin\") pod \"multus-j22m9\" (UID: 
\"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.124019 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-os-release\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.124043 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-host-run-multus-certs\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.124057 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-multus-conf-dir\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.124072 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3391189b-95c4-4746-8aea-1d3be0b4ae1a-cni-binary-copy\") pod \"multus-additional-cni-plugins-jl867\" (UID: \"3391189b-95c4-4746-8aea-1d3be0b4ae1a\") " pod="openshift-multus/multus-additional-cni-plugins-jl867" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.124086 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-host-var-lib-cni-multus\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.124108 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-multus-cni-dir\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.124128 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9km6c\" (UniqueName: \"kubernetes.io/projected/be34b4a6-0156-4e21-bae6-12af18583b0d-kube-api-access-9km6c\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.124162 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3391189b-95c4-4746-8aea-1d3be0b4ae1a-system-cni-dir\") pod \"multus-additional-cni-plugins-jl867\" (UID: \"3391189b-95c4-4746-8aea-1d3be0b4ae1a\") " pod="openshift-multus/multus-additional-cni-plugins-jl867" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.124176 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: 
\"kubernetes.io/configmap/3391189b-95c4-4746-8aea-1d3be0b4ae1a-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-jl867\" (UID: \"3391189b-95c4-4746-8aea-1d3be0b4ae1a\") " pod="openshift-multus/multus-additional-cni-plugins-jl867" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.124192 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-host-run-k8s-cni-cncf-io\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.124222 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j2qs5\" (UniqueName: \"kubernetes.io/projected/3391189b-95c4-4746-8aea-1d3be0b4ae1a-kube-api-access-j2qs5\") pod \"multus-additional-cni-plugins-jl867\" (UID: \"3391189b-95c4-4746-8aea-1d3be0b4ae1a\") " pod="openshift-multus/multus-additional-cni-plugins-jl867" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.124236 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-host-var-lib-kubelet\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.124249 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-hostroot\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.124263 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3391189b-95c4-4746-8aea-1d3be0b4ae1a-os-release\") pod \"multus-additional-cni-plugins-jl867\" (UID: \"3391189b-95c4-4746-8aea-1d3be0b4ae1a\") " pod="openshift-multus/multus-additional-cni-plugins-jl867" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.124287 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/be34b4a6-0156-4e21-bae6-12af18583b0d-cni-binary-copy\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.125045 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-multus-socket-dir-parent\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.125077 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3391189b-95c4-4746-8aea-1d3be0b4ae1a-cnibin\") pod \"multus-additional-cni-plugins-jl867\" (UID: \"3391189b-95c4-4746-8aea-1d3be0b4ae1a\") " pod="openshift-multus/multus-additional-cni-plugins-jl867" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.125092 4665 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-host-var-lib-cni-bin\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.125105 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-system-cni-dir\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.144041 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.158006 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container 
could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.174514 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\"
:\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.192841 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.206570 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.223795 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.225898 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3391189b-95c4-4746-8aea-1d3be0b4ae1a-cnibin\") pod \"multus-additional-cni-plugins-jl867\" (UID: \"3391189b-95c4-4746-8aea-1d3be0b4ae1a\") " pod="openshift-multus/multus-additional-cni-plugins-jl867" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.225935 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-host-var-lib-cni-bin\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.225957 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-system-cni-dir\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.225979 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-etc-kubernetes\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226001 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3391189b-95c4-4746-8aea-1d3be0b4ae1a-tuning-conf-dir\") pod \"multus-additional-cni-plugins-jl867\" (UID: \"3391189b-95c4-4746-8aea-1d3be0b4ae1a\") " pod="openshift-multus/multus-additional-cni-plugins-jl867" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226023 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-host-run-netns\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226045 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/be34b4a6-0156-4e21-bae6-12af18583b0d-multus-daemon-config\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226064 4665 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-os-release\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226069 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/3391189b-95c4-4746-8aea-1d3be0b4ae1a-cnibin\") pod \"multus-additional-cni-plugins-jl867\" (UID: \"3391189b-95c4-4746-8aea-1d3be0b4ae1a\") " pod="openshift-multus/multus-additional-cni-plugins-jl867" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226140 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-host-var-lib-cni-bin\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226152 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-cnibin\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226087 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-cnibin\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226197 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-multus-conf-dir\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226200 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-system-cni-dir\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226216 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-host-run-multus-certs\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226237 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-etc-kubernetes\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226240 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3391189b-95c4-4746-8aea-1d3be0b4ae1a-cni-binary-copy\") pod \"multus-additional-cni-plugins-jl867\" (UID: \"3391189b-95c4-4746-8aea-1d3be0b4ae1a\") " 
pod="openshift-multus/multus-additional-cni-plugins-jl867" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226281 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-multus-cni-dir\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226323 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-host-var-lib-cni-multus\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226373 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3391189b-95c4-4746-8aea-1d3be0b4ae1a-system-cni-dir\") pod \"multus-additional-cni-plugins-jl867\" (UID: \"3391189b-95c4-4746-8aea-1d3be0b4ae1a\") " pod="openshift-multus/multus-additional-cni-plugins-jl867" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226395 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9km6c\" (UniqueName: \"kubernetes.io/projected/be34b4a6-0156-4e21-bae6-12af18583b0d-kube-api-access-9km6c\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226415 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/3391189b-95c4-4746-8aea-1d3be0b4ae1a-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-jl867\" (UID: \"3391189b-95c4-4746-8aea-1d3be0b4ae1a\") " pod="openshift-multus/multus-additional-cni-plugins-jl867" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226435 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-host-run-k8s-cni-cncf-io\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226456 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j2qs5\" (UniqueName: \"kubernetes.io/projected/3391189b-95c4-4746-8aea-1d3be0b4ae1a-kube-api-access-j2qs5\") pod \"multus-additional-cni-plugins-jl867\" (UID: \"3391189b-95c4-4746-8aea-1d3be0b4ae1a\") " pod="openshift-multus/multus-additional-cni-plugins-jl867" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226474 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-host-var-lib-kubelet\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226494 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3391189b-95c4-4746-8aea-1d3be0b4ae1a-os-release\") pod \"multus-additional-cni-plugins-jl867\" (UID: \"3391189b-95c4-4746-8aea-1d3be0b4ae1a\") " 
pod="openshift-multus/multus-additional-cni-plugins-jl867" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226511 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/be34b4a6-0156-4e21-bae6-12af18583b0d-cni-binary-copy\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226516 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-multus-cni-dir\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226527 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-hostroot\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226555 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-hostroot\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226573 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-multus-socket-dir-parent\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226586 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-host-var-lib-cni-multus\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226631 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/3391189b-95c4-4746-8aea-1d3be0b4ae1a-system-cni-dir\") pod \"multus-additional-cni-plugins-jl867\" (UID: \"3391189b-95c4-4746-8aea-1d3be0b4ae1a\") " pod="openshift-multus/multus-additional-cni-plugins-jl867" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226815 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/3391189b-95c4-4746-8aea-1d3be0b4ae1a-cni-binary-copy\") pod \"multus-additional-cni-plugins-jl867\" (UID: \"3391189b-95c4-4746-8aea-1d3be0b4ae1a\") " pod="openshift-multus/multus-additional-cni-plugins-jl867" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226844 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/3391189b-95c4-4746-8aea-1d3be0b4ae1a-tuning-conf-dir\") pod \"multus-additional-cni-plugins-jl867\" (UID: \"3391189b-95c4-4746-8aea-1d3be0b4ae1a\") " pod="openshift-multus/multus-additional-cni-plugins-jl867" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226851 4665 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-multus-conf-dir\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226875 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-host-run-multus-certs\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226878 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-host-run-netns\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226909 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-multus-socket-dir-parent\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226933 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-host-var-lib-kubelet\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.226954 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-host-run-k8s-cni-cncf-io\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.227024 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/be34b4a6-0156-4e21-bae6-12af18583b0d-os-release\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.227142 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/3391189b-95c4-4746-8aea-1d3be0b4ae1a-os-release\") pod \"multus-additional-cni-plugins-jl867\" (UID: \"3391189b-95c4-4746-8aea-1d3be0b4ae1a\") " pod="openshift-multus/multus-additional-cni-plugins-jl867" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.227252 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/3391189b-95c4-4746-8aea-1d3be0b4ae1a-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-jl867\" (UID: \"3391189b-95c4-4746-8aea-1d3be0b4ae1a\") " pod="openshift-multus/multus-additional-cni-plugins-jl867" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.227582 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/be34b4a6-0156-4e21-bae6-12af18583b0d-multus-daemon-config\") pod \"multus-j22m9\" (UID: 
\"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.227698 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/be34b4a6-0156-4e21-bae6-12af18583b0d-cni-binary-copy\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.237805 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\
\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.248637 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9km6c\" (UniqueName: \"kubernetes.io/projected/be34b4a6-0156-4e21-bae6-12af18583b0d-kube-api-access-9km6c\") pod \"multus-j22m9\" (UID: \"be34b4a6-0156-4e21-bae6-12af18583b0d\") " pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.255508 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j2qs5\" (UniqueName: \"kubernetes.io/projected/3391189b-95c4-4746-8aea-1d3be0b4ae1a-kube-api-access-j2qs5\") pod \"multus-additional-cni-plugins-jl867\" (UID: \"3391189b-95c4-4746-8aea-1d3be0b4ae1a\") " pod="openshift-multus/multus-additional-cni-plugins-jl867" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.265150 4665 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.278189 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.297331 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.326255 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.342183 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.359259 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.379462 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-j22m9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.386356 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-jl867" Dec 05 01:10:44 crc kubenswrapper[4665]: W1205 01:10:44.419021 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3391189b_95c4_4746_8aea_1d3be0b4ae1a.slice/crio-446b7854b42fe1f57f60903d882f146bac06f52707569c34f62d54602a788c27 WatchSource:0}: Error finding container 446b7854b42fe1f57f60903d882f146bac06f52707569c34f62d54602a788c27: Status 404 returned error can't find the container with id 446b7854b42fe1f57f60903d882f146bac06f52707569c34f62d54602a788c27 Dec 05 01:10:44 crc kubenswrapper[4665]: W1205 01:10:44.420650 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbe34b4a6_0156_4e21_bae6_12af18583b0d.slice/crio-6345227f7b77ac31a89abc73a0bb7779ab8190ca1d729d47086da31824cfed27 WatchSource:0}: Error finding container 6345227f7b77ac31a89abc73a0bb7779ab8190ca1d729d47086da31824cfed27: Status 404 returned error can't find the container with id 6345227f7b77ac31a89abc73a0bb7779ab8190ca1d729d47086da31824cfed27 Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.482725 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2bmn9"] Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.483742 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.486956 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.487423 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.487819 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.493855 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.494016 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.494600 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.501142 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.523187 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.528439 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:10:44 crc kubenswrapper[4665]: E1205 
01:10:44.528571 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:10:46.528550441 +0000 UTC m=+21.867942740 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.570682 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.623489 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.629039 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-systemd-units\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.629072 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-slash\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.629090 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-var-lib-openvswitch\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.629110 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-run-ovn-kubernetes\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.629125 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-run-openvswitch\") pod 
\"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.629139 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-run-netns\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.629155 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.629176 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-ovn-node-metrics-cert\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.629202 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.629220 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.629236 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-etc-openvswitch\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.629254 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-cni-netd\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.629270 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bklrv\" (UniqueName: \"kubernetes.io/projected/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-kube-api-access-bklrv\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: E1205 01:10:44.629377 4665 
configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 01:10:44 crc kubenswrapper[4665]: E1205 01:10:44.629437 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 01:10:46.629422001 +0000 UTC m=+21.968814290 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 01:10:44 crc kubenswrapper[4665]: E1205 01:10:44.629444 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 01:10:44 crc kubenswrapper[4665]: E1205 01:10:44.629459 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 01:10:44 crc kubenswrapper[4665]: E1205 01:10:44.629469 4665 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:10:44 crc kubenswrapper[4665]: E1205 01:10:44.629506 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 01:10:46.629491773 +0000 UTC m=+21.968884072 (durationBeforeRetry 2s). 
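
Two different volume operations are being rate-limited in the lines above: the CSI unmount whose driver kubevirt.io.hostpath-provisioner is not in the registered-driver list, and the nginx-conf/kube-api-access mounts that cannot find their API objects. In every case nestedpendingoperations blocks the next attempt until the last failure time plus a doubling delay. A minimal Go sketch of that gating, assuming a 500ms seed and a 2m cap (illustrative values, not read from this log; two prior failures yield the 2s delay seen here):

    package main

    import (
        "fmt"
        "time"
    )

    // pendingOp loosely mirrors the bookkeeping behind the
    // "No retries permitted until ..." lines: one record per volume
    // operation, with a delay that doubles on every failure.
    type pendingOp struct {
        failures int       // consecutive failures so far
        lastErr  time.Time // when the last attempt failed
    }

    // durationBeforeRetry doubles a 500ms seed per failure, capped at 2m.
    // Seed and cap are assumptions for illustration only.
    func (p *pendingOp) durationBeforeRetry() time.Duration {
        d := 500 * time.Millisecond
        for i := 0; i < p.failures; i++ {
            d *= 2
        }
        if d > 2*time.Minute {
            d = 2 * time.Minute
        }
        return d
    }

    func main() {
        op := pendingOp{failures: 2, lastErr: time.Now()}
        fmt.Printf("no retries permitted until %s (durationBeforeRetry %s)\n",
            op.lastErr.Add(op.durationBeforeRetry()).Format(time.RFC3339),
            op.durationBeforeRetry())
    }

Under those assumptions the delay doubles on each further failure, so subsequent retries would be gated 4s, then 8s apart, which matches the general cadence of retry logs like this one.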
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.629288 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.629580 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:10:44 crc kubenswrapper[4665]: E1205 01:10:44.629840 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 01:10:44 crc kubenswrapper[4665]: E1205 01:10:44.629849 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 01:10:44 crc kubenswrapper[4665]: E1205 01:10:44.629856 4665 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:10:44 crc kubenswrapper[4665]: E1205 01:10:44.629861 4665 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 01:10:44 crc kubenswrapper[4665]: E1205 01:10:44.629876 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 01:10:46.629870092 +0000 UTC m=+21.969262391 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:10:44 crc kubenswrapper[4665]: E1205 01:10:44.629892 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
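
The kube-api-access-* volumes failing above are projected volumes: each bundles a bound service-account token, the kube-root-ca.crt ConfigMap, and (on OpenShift) openshift-service-ca.crt. The kubelet reports one "not registered" error per missing source and joins them into the bracketed list, which is why these errors arrive in pairs; "not registered" suggests the kubelet's local object caches do not yet know these ConfigMaps, consistent with a process that restarted roughly 22s earlier (the m=+21.9 uptime stamps). A rough model of the error shape, with invented types standing in for the kubelet's internals:

    package main

    import (
        "errors"
        "fmt"
    )

    // objectKey identifies a ConfigMap or Secret in a local cache.
    type objectKey struct{ namespace, name string }

    // registered stands in for the informer-backed cache that the
    // "not registered" errors refer to; empty here to reproduce the failure.
    var registered = map[objectKey]bool{}

    // prepareProjected mimics the error shape above: one error per
    // missing source, joined into the list MountVolume.SetUp reports.
    func prepareProjected(ns string, sources ...string) error {
        var errs []error
        for _, name := range sources {
            if !registered[objectKey{ns, name}] {
                errs = append(errs, fmt.Errorf("object %q/%q not registered", ns, name))
            }
        }
        return errors.Join(errs...) // nil once every source is present
    }

    func main() {
        fmt.Println(prepareProjected("openshift-network-diagnostics",
            "kube-root-ca.crt", "openshift-service-ca.crt"))
    }
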
No retries permitted until 2025-12-05 01:10:46.629883582 +0000 UTC m=+21.969275881 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.629597 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-env-overrides\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.629980 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-kubelet\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.630002 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-log-socket\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.630018 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-cni-bin\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.630038 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-ovnkube-config\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.630064 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-ovnkube-script-lib\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.630080 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-run-systemd\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.630095 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-node-log\") pod 
\"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.630111 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-run-ovn\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.648470 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.687810 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.719283 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
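
Each "Failed to update status" record above carries the body of a strategic-merge PATCH against the pod's status subresource. Conditions merge by their type key, and the $setElementOrder/conditions directive pins the final list ordering, which is why every patch repeats the five condition types before the changed entries. A hedged reconstruction of that shape (uid copied from the network-check-source entry above; fields trimmed for brevity):

    package main

    import (
        "encoding/json"
        "fmt"
    )

    func main() {
        patch := map[string]any{
            "metadata": map[string]any{"uid": "9d751cbb-f2e2-430d-9754-c882a5e924a5"},
            "status": map[string]any{
                // $setElementOrder pins the merged order; conditions
                // themselves merge by "type", so only changed fields
                // need to be sent.
                "$setElementOrder/conditions": []map[string]string{
                    {"type": "PodReadyToStartContainers"}, {"type": "Initialized"},
                    {"type": "Ready"}, {"type": "ContainersReady"}, {"type": "PodScheduled"},
                },
                "conditions": []map[string]any{
                    {"type": "Ready", "status": "False", "reason": "ContainersNotReady"},
                },
            },
        }
        b, _ := json.Marshal(patch)
        fmt.Println(string(b)) // body of PATCH .../pods/<name>/status
    }

The same shape recurs, with different uids and conditions, in every rejected patch in this section.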
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.730926 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-cni-netd\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.730969 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bklrv\" (UniqueName: \"kubernetes.io/projected/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-kube-api-access-bklrv\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.731068 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-cni-netd\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.731273 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-env-overrides\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.731328 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-kubelet\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.731347 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-log-socket\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.731364 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-cni-bin\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.731433 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-log-socket\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.731425 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-cni-bin\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.731455 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-ovnkube-config\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.731404 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-kubelet\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.731474 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-ovnkube-script-lib\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.731674 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-run-systemd\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.731712 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-env-overrides\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.732115 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-ovnkube-config\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.732152 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-ovnkube-script-lib\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " 
pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.732194 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-run-systemd\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.732230 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-node-log\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.732247 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-run-ovn\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.732260 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-systemd-units\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.732348 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-run-ovn\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.732332 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-systemd-units\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.732350 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-slash\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.732369 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-slash\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.732411 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-var-lib-openvswitch\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.732309 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-node-log\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.732463 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-var-lib-openvswitch\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.732494 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-run-ovn-kubernetes\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.732511 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-run-openvswitch\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.732528 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-run-netns\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.732534 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-run-openvswitch\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.732545 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.732567 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-run-ovn-kubernetes\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.732578 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-ovn-node-metrics-cert\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.732593 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" 
(UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-run-netns\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.732612 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-etc-openvswitch\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.732618 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.732681 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-etc-openvswitch\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.737726 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-ovn-node-metrics-cert\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.753818 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bklrv\" (UniqueName: \"kubernetes.io/projected/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-kube-api-access-bklrv\") pod \"ovnkube-node-2bmn9\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.757581 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.787226 4665 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Dec 05 01:10:44 crc kubenswrapper[4665]: W1205 01:10:44.787980 4665 reflector.go:484] object-"openshift-multus"/"default-dockercfg-2q5b6": watch of *v1.Secret ended with: very short watch: object-"openshift-multus"/"default-dockercfg-2q5b6": Unexpected watch close - watch lasted less than a second and no items received Dec 05 01:10:44 crc kubenswrapper[4665]: W1205 01:10:44.788170 4665 reflector.go:484] object-"openshift-multus"/"cni-copy-resources": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-multus"/"cni-copy-resources": Unexpected watch close - watch lasted less than a second and no items 
received Dec 05 01:10:44 crc kubenswrapper[4665]: W1205 01:10:44.788333 4665 reflector.go:484] object-"openshift-ovn-kubernetes"/"ovnkube-script-lib": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"ovnkube-script-lib": Unexpected watch close - watch lasted less than a second and no items received Dec 05 01:10:44 crc kubenswrapper[4665]: W1205 01:10:44.788674 4665 reflector.go:484] object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl": watch of *v1.Secret ended with: very short watch: object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl": Unexpected watch close - watch lasted less than a second and no items received Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.788755 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
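
The "Certificate rotation detected, shutting down client connections" line explains the burst of warnings around it: when the kubelet swaps in a rotated client certificate it closes the shared transport, every open watch terminates at once, each reflector logs a "very short watch", and in-flight requests fail with "use of closed network connection" (as the iptables-alerter status patch just below does). The watches are simply re-established on the new connection, so the warnings are benign. A toy model of that loop, with an invented watcher interface standing in for client-go's far more involved reflector machinery:

    package main

    import (
        "fmt"
        "time"
    )

    type event struct{ object string }

    type watcher interface {
        resultChan() <-chan event // closed when the connection drops
    }

    // droppedWatch models a watch whose transport was just closed by
    // certificate rotation: the channel closes before delivering anything.
    type droppedWatch struct{}

    func (droppedWatch) resultChan() <-chan event {
        ch := make(chan event)
        close(ch)
        return ch
    }

    // watchLoop shows why the warnings are harmless: an early close is
    // logged and the watch is immediately re-established.
    func watchLoop(newWatch func() watcher, restarts int) {
        for i := 0; i < restarts; i++ {
            start := time.Now()
            for ev := range newWatch().resultChan() {
                fmt.Println("observed", ev.object)
            }
            if d := time.Since(start); d < time.Second {
                fmt.Printf("very short watch: lasted %v, re-dialing\n",
                    d.Round(time.Millisecond))
            }
        }
    }

    func main() {
        watchLoop(func() watcher { return droppedWatch{} }, 3)
    }
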
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Patch \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-network-operator/pods/iptables-alerter-4ln5h/status\": read tcp 38.102.83.236:40754->38.102.83.236:6443: use of closed network connection"
Dec 05 01:10:44 crc kubenswrapper[4665]: W1205 01:10:44.789002 4665 reflector.go:484] object-"openshift-multus"/"multus-daemon-config": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-multus"/"multus-daemon-config": Unexpected watch close - watch lasted less than a second and no items received
Dec 05 01:10:44 crc kubenswrapper[4665]: W1205 01:10:44.789090 4665 reflector.go:484] object-"openshift-multus"/"default-cni-sysctl-allowlist": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-multus"/"default-cni-sysctl-allowlist": Unexpected watch close - watch lasted less than a second and no items received
Dec 05 01:10:44 crc kubenswrapper[4665]: W1205 01:10:44.789249 4665 reflector.go:484] object-"openshift-multus"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-multus"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received
Dec 05 01:10:44 crc kubenswrapper[4665]: W1205 01:10:44.789278 4665 reflector.go:484] object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert": watch of *v1.Secret ended with: very short watch: object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert": Unexpected watch close - watch lasted less than a second and no items received
Dec 05 01:10:44 crc kubenswrapper[4665]: W1205 01:10:44.789569 4665 reflector.go:484] object-"openshift-ovn-kubernetes"/"env-overrides": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"env-overrides": Unexpected watch close - watch lasted less than a second and no items received
Dec 05 01:10:44 crc kubenswrapper[4665]: W1205 01:10:44.789597 4665 reflector.go:484] object-"openshift-ovn-kubernetes"/"ovnkube-config": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"ovnkube-config": Unexpected watch close - watch lasted less than a second and no items received
Dec 05 01:10:44 crc kubenswrapper[4665]: W1205 01:10:44.790071 4665 reflector.go:484] object-"openshift-multus"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-multus"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received
Dec 05 01:10:44 crc kubenswrapper[4665]: W1205 01:10:44.790122 4665 reflector.go:484] object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz": watch of *v1.Secret ended with: very short watch: object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz": Unexpected watch close - watch lasted less than a second and no items received
Dec 05 01:10:44 crc kubenswrapper[4665]: W1205 01:10:44.790467 4665 reflector.go:484] object-"openshift-ovn-kubernetes"/"kube-root-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"kube-root-ca.crt": Unexpected watch close - watch lasted less than a second and no items received
Dec 05 01:10:44 crc kubenswrapper[4665]: W1205 01:10:44.790542 4665 reflector.go:484] object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt": watch of *v1.ConfigMap ended with: very short watch: object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt": Unexpected watch close - watch lasted less than a second and no items received
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.797086 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.833829 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.860088 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.887402 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.892726 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.892782 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.892802 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 01:10:44 crc kubenswrapper[4665]: E1205 01:10:44.892841 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 01:10:44 crc kubenswrapper[4665]: E1205 01:10:44.892893 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 01:10:44 crc kubenswrapper[4665]: E1205 01:10:44.893015 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.899325 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.899950 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.901174 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.901935 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.902917 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.903445 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.904001 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.907718 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.908431 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.909706 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.910206 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.916764 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.917505 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.918024 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.919028 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.919564 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.925910 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.926630 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.927185 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.928479 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.928950 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.933856 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.934399 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.935518 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.936089 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.936691 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.937912 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.938419 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.939576 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.940046 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.941429 4665 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.941529 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.943441 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.947112 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.947711 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.954996 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.955785 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.956727 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.957466 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.958687 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.959164 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.960114 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.960750 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.961820 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.965028 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.965584 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.966511 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.967081 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.971733 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.972242 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.973171 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.973702 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.974613 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.975144 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.975596 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes"
Dec 05 01:10:44 crc kubenswrapper[4665]: I1205 01:10:44.988095 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:44Z is after 2025-08-24T17:21:41Z"
Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.008507 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z"
Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.014094 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-cps4h" event={"ID":"91c7cec2-2aab-4cab-a055-c7994cb11d17","Type":"ContainerStarted","Data":"6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b"}
Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.016024 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" event={"ID":"3391189b-95c4-4746-8aea-1d3be0b4ae1a","Type":"ContainerStarted","Data":"3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f"}
Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.016196 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" event={"ID":"3391189b-95c4-4746-8aea-1d3be0b4ae1a","Type":"ContainerStarted","Data":"446b7854b42fe1f57f60903d882f146bac06f52707569c34f62d54602a788c27"}
Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.019603 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j22m9" event={"ID":"be34b4a6-0156-4e21-bae6-12af18583b0d","Type":"ContainerStarted","Data":"84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954"}
Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.019627 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j22m9" event={"ID":"be34b4a6-0156-4e21-bae6-12af18583b0d","Type":"ContainerStarted","Data":"6345227f7b77ac31a89abc73a0bb7779ab8190ca1d729d47086da31824cfed27"}
Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.024812 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerStarted","Data":"7dfc40eeea11eac4d19ef0cef9577075dab1a931800b72d63d2d8edd732d9ff5"}
Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.045119 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerStarted","Data":"e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe"}
Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.045156 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerStarted","Data":"8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2"}
Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.045165 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerStarted","Data":"1f2936bf3cb1fc6d5e18abe8fa95c45e0b1deefe885b664b490061cc7055bce5"}
Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.053604 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z"
Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.077507 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z"
Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.114571 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z"
Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.133888 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.149783 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.173630 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.187983 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.204507 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.219188 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.234140 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.247923 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.258876 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.276752 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers 
with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\
"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name
\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not 
yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.289500 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syn
cer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.302392 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.319397 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.333271 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.344198 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.357112 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.371906 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:45 crc 
kubenswrapper[4665]: I1205 01:10:45.385221 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.402478 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.600480 4665 kubelet_node_status.go:401] "Setting 
node annotation to enable volume controller attach/detach" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.602154 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.602197 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.602211 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.602401 4665 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.612792 4665 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.613005 4665 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.613965 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.613995 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.614005 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.614021 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.614033 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:45Z","lastTransitionTime":"2025-12-05T01:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:45 crc kubenswrapper[4665]: E1205 01:10:45.666994 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.673630 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.673663 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.673672 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.673686 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.673695 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:45Z","lastTransitionTime":"2025-12-05T01:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:45 crc kubenswrapper[4665]: E1205 01:10:45.692423 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.692657 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.698712 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.698740 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.698749 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.698761 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.698770 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:45Z","lastTransitionTime":"2025-12-05T01:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:45 crc kubenswrapper[4665]: E1205 01:10:45.711493 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.714368 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.714400 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.714410 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.714423 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.714432 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:45Z","lastTransitionTime":"2025-12-05T01:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:45 crc kubenswrapper[4665]: E1205 01:10:45.725351 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.727896 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.727926 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.727936 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.727950 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.727966 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:45Z","lastTransitionTime":"2025-12-05T01:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.734209 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 05 01:10:45 crc kubenswrapper[4665]: E1205 01:10:45.737908 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:45 crc kubenswrapper[4665]: E1205 01:10:45.738057 4665 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.739634 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.739658 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.739666 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.739679 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.739696 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:45Z","lastTransitionTime":"2025-12-05T01:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.751684 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.777810 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.836972 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.841800 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.841840 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.841851 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.841867 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.841876 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:45Z","lastTransitionTime":"2025-12-05T01:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.874002 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.907162 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.943816 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.943842 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.943850 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.943863 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.943871 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:45Z","lastTransitionTime":"2025-12-05T01:10:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.951931 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 05 01:10:45 crc kubenswrapper[4665]: I1205 01:10:45.982101 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.046018 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.046059 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.046070 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.046087 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.046098 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:46Z","lastTransitionTime":"2025-12-05T01:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.047954 4665 generic.go:334] "Generic (PLEG): container finished" podID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerID="feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f" exitCode=0 Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.048008 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerDied","Data":"feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f"} Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.050015 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf"} Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.051892 4665 generic.go:334] "Generic (PLEG): container finished" podID="3391189b-95c4-4746-8aea-1d3be0b4ae1a" containerID="3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f" exitCode=0 Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.051916 4665 generic.go:334] "Generic (PLEG): container finished" podID="3391189b-95c4-4746-8aea-1d3be0b4ae1a" containerID="0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86" exitCode=0 Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.051935 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.052148 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" event={"ID":"3391189b-95c4-4746-8aea-1d3be0b4ae1a","Type":"ContainerDied","Data":"3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f"} Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.052196 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" event={"ID":"3391189b-95c4-4746-8aea-1d3be0b4ae1a","Type":"ContainerDied","Data":"0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86"} Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.077939 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.093481 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.110654 4665 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.112018 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.120808 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.142265 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.157503 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.157537 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.157548 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.157568 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.157582 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:46Z","lastTransitionTime":"2025-12-05T01:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.162356 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc 
kubenswrapper[4665]: I1205 01:10:46.180914 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"runnin
g\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.193706 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.196822 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.214658 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.228455 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.229266 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.248353 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.262000 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.262044 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.262056 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.262074 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.262084 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:46Z","lastTransitionTime":"2025-12-05T01:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.262963 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.265639 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.280393 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.281692 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.284771 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.290798 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.313436 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.330181 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-
05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.331101 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.344541 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\
",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is 
complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.356642 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.364908 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.364942 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.364952 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.364971 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.364982 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:46Z","lastTransitionTime":"2025-12-05T01:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.373428 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.413005 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.453238 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.466645 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.466711 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.466723 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.466737 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.466747 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:46Z","lastTransitionTime":"2025-12-05T01:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.492885 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.530757 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.553611 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:10:46 crc kubenswrapper[4665]: E1205 01:10:46.553724 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-05 01:10:50.553685577 +0000 UTC m=+25.893077866 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.570628 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.570656 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.570665 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.570680 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.570691 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:46Z","lastTransitionTime":"2025-12-05T01:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.577210 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.619285 4665 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-di
r\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c303953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\"
:\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.654921 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:10:46 crc kubenswrapper[4665]: E1205 01:10:46.655022 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.655039 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.655084 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:10:46 crc kubenswrapper[4665]: E1205 01:10:46.655057 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 01:10:46 crc kubenswrapper[4665]: E1205 01:10:46.655127 4665 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod 
openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:10:46 crc kubenswrapper[4665]: E1205 01:10:46.655172 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 01:10:50.655159602 +0000 UTC m=+25.994551901 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.655195 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:10:46 crc kubenswrapper[4665]: E1205 01:10:46.655108 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 01:10:46 crc kubenswrapper[4665]: E1205 01:10:46.655240 4665 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 01:10:46 crc kubenswrapper[4665]: E1205 01:10:46.655262 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 01:10:50.655256175 +0000 UTC m=+25.994648474 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 01:10:46 crc kubenswrapper[4665]: E1205 01:10:46.655269 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 01:10:46 crc kubenswrapper[4665]: E1205 01:10:46.655291 4665 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:10:46 crc kubenswrapper[4665]: E1205 01:10:46.655304 4665 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 01:10:46 crc kubenswrapper[4665]: E1205 01:10:46.655393 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 01:10:50.655350967 +0000 UTC m=+25.994743266 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 01:10:46 crc kubenswrapper[4665]: E1205 01:10:46.655407 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 01:10:50.655401229 +0000 UTC m=+25.994793628 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.658538 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.673608 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.673643 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.673653 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.673669 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.673680 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:46Z","lastTransitionTime":"2025-12-05T01:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.693277 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.733039 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.775960 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.776197 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.776327 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.776417 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.776531 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:46Z","lastTransitionTime":"2025-12-05T01:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.778502 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\
\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.814268 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.851870 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.879473 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.879505 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.879514 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.879530 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.879550 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:46Z","lastTransitionTime":"2025-12-05T01:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.892468 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.892517 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.892517 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:10:46 crc kubenswrapper[4665]: E1205 01:10:46.892876 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:10:46 crc kubenswrapper[4665]: E1205 01:10:46.892735 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:10:46 crc kubenswrapper[4665]: E1205 01:10:46.892919 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.895077 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.932893 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.975142 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:46Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.981849 4665 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.981885 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.981895 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.981911 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:46 crc kubenswrapper[4665]: I1205 01:10:46.981922 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:46Z","lastTransitionTime":"2025-12-05T01:10:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.012081 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:47Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.051643 4665 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-dns/node-resolver-cps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:47Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.057186 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerStarted","Data":"79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336"} Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.057226 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerStarted","Data":"41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f"} Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.057256 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerStarted","Data":"da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993"} Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.057268 
4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerStarted","Data":"851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6"} Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.057277 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerStarted","Data":"fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd"} Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.057287 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerStarted","Data":"c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132"} Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.059047 4665 generic.go:334] "Generic (PLEG): container finished" podID="3391189b-95c4-4746-8aea-1d3be0b4ae1a" containerID="310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6" exitCode=0 Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.059118 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" event={"ID":"3391189b-95c4-4746-8aea-1d3be0b4ae1a","Type":"ContainerDied","Data":"310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6"} Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.085892 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.086481 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.086517 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.086542 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.086553 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:47Z","lastTransitionTime":"2025-12-05T01:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.096561 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517
338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:47Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.142600 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:47Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.179755 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:47Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.196960 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.197001 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.197014 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.197030 4665 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.197041 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:47Z","lastTransitionTime":"2025-12-05T01:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.232635 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:47Z 
is after 2025-08-24T17:21:41Z" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.261881 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c303953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\"
:\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:47Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.297649 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:47Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.298898 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.298930 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.298941 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.298958 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.298970 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:47Z","lastTransitionTime":"2025-12-05T01:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.333143 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:47Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.376364 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:47Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.400858 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.400893 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.400904 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.400919 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.400929 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:47Z","lastTransitionTime":"2025-12-05T01:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.414356 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:47Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.453085 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:47Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.492840 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:47Z is after 2025-08-24T17:21:41Z"
Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.503365 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.503407 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.503416 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.503431 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.503449 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:47Z","lastTransitionTime":"2025-12-05T01:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.531872 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:47Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.571521 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:47Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.604998 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.605027 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.605036 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.605049 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.605058 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:47Z","lastTransitionTime":"2025-12-05T01:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.611717 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:47Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.706711 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.706743 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.706751 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.706765 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.706773 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:47Z","lastTransitionTime":"2025-12-05T01:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.809619 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.809863 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.809877 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.809895 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.809906 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:47Z","lastTransitionTime":"2025-12-05T01:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.912617 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.913145 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.913260 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.913374 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:47 crc kubenswrapper[4665]: I1205 01:10:47.913466 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:47Z","lastTransitionTime":"2025-12-05T01:10:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.016056 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.016320 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.016403 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.016940 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.016970 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:48Z","lastTransitionTime":"2025-12-05T01:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.064806 4665 generic.go:334] "Generic (PLEG): container finished" podID="3391189b-95c4-4746-8aea-1d3be0b4ae1a" containerID="fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e" exitCode=0 Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.064853 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" event={"ID":"3391189b-95c4-4746-8aea-1d3be0b4ae1a","Type":"ContainerDied","Data":"fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e"} Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.081913 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-conf
ig\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.097830 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.113113 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.120533 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.120576 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.120588 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.120605 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.120617 4665 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:48Z","lastTransitionTime":"2025-12-05T01:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.126891 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.139675 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.152074 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.168350 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.179433 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.192075 4665 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 
01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.205143 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.218486 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.223488 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.223529 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.223541 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.223558 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.223567 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:48Z","lastTransitionTime":"2025-12-05T01:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.240047 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"
ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",
\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"c
ri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.258429 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",
\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c303953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o
://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:48Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.325815 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.325851 4665 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.325859 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.325873 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.325882 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:48Z","lastTransitionTime":"2025-12-05T01:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.428145 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.428185 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.428197 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.428218 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.428230 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:48Z","lastTransitionTime":"2025-12-05T01:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.531457 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.531496 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.531505 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.531520 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.531532 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:48Z","lastTransitionTime":"2025-12-05T01:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.633687 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.633722 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.633734 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.633753 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.633765 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:48Z","lastTransitionTime":"2025-12-05T01:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.736537 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.736573 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.736581 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.736594 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.736603 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:48Z","lastTransitionTime":"2025-12-05T01:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.838793 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.838839 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.838848 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.838865 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.838875 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:48Z","lastTransitionTime":"2025-12-05T01:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.893631 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.893673 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.893787 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:10:48 crc kubenswrapper[4665]: E1205 01:10:48.894044 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:10:48 crc kubenswrapper[4665]: E1205 01:10:48.894160 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:10:48 crc kubenswrapper[4665]: E1205 01:10:48.893968 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.942424 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.942486 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.942500 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.942530 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:48 crc kubenswrapper[4665]: I1205 01:10:48.942547 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:48Z","lastTransitionTime":"2025-12-05T01:10:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.045932 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.045998 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.046012 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.046033 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.046048 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:49Z","lastTransitionTime":"2025-12-05T01:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.074628 4665 generic.go:334] "Generic (PLEG): container finished" podID="3391189b-95c4-4746-8aea-1d3be0b4ae1a" containerID="b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f" exitCode=0 Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.074768 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" event={"ID":"3391189b-95c4-4746-8aea-1d3be0b4ae1a","Type":"ContainerDied","Data":"b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f"} Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.088935 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerStarted","Data":"f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670"} Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.105627 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:49Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.131059 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:49Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.147015 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:49Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.149617 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.149706 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.149728 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.149770 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.149792 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:49Z","lastTransitionTime":"2025-12-05T01:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.175533 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:49Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.191265 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:49Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.211516 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:49Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.234340 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/
host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"sta
rted\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:49Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.250201 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed 
to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:49Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.254394 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.254433 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.254450 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.254475 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.254493 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:49Z","lastTransitionTime":"2025-12-05T01:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.264659 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:49Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.281427 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:49Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.295648 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:49Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.314459 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:49Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.333312 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/op
enshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c303953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441e
cd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:49Z is after 2025-08-24T17:21:41Z"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.357283 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.357329 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.357338 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.357353 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.357363 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:49Z","lastTransitionTime":"2025-12-05T01:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.460108 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.460140 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.460150 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.460165 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.460175 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:49Z","lastTransitionTime":"2025-12-05T01:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.562705 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.562907 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.562968 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.563026 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.563078 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:49Z","lastTransitionTime":"2025-12-05T01:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.665713 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.665952 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.666008 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.666063 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.666114 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:49Z","lastTransitionTime":"2025-12-05T01:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.768267 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.769419 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.769753 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.769956 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.770169 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:49Z","lastTransitionTime":"2025-12-05T01:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.872666 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.872706 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.872718 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.872733 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.872744 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:49Z","lastTransitionTime":"2025-12-05T01:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.974674 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.974706 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.974717 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.974731 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:10:49 crc kubenswrapper[4665]: I1205 01:10:49.974739 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:49Z","lastTransitionTime":"2025-12-05T01:10:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.077065 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.077099 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.077108 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.077120 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.077128 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:50Z","lastTransitionTime":"2025-12-05T01:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.094844 4665 generic.go:334] "Generic (PLEG): container finished" podID="3391189b-95c4-4746-8aea-1d3be0b4ae1a" containerID="7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe" exitCode=0
Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.094889 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" event={"ID":"3391189b-95c4-4746-8aea-1d3be0b4ae1a","Type":"ContainerDied","Data":"7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe"}
Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.115360 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.128319 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.152172 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.173152 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/op
enshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c303953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441e
cd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z"
Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.179137 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.179169 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.179180 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.179197 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.179211 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:50Z","lastTransitionTime":"2025-12-05T01:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.188093 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.200821 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.218028 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\
"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb4
45be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.233095 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},
\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.243422 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.253761 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.264778 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.274752 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.281622 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.281654 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.281662 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.281674 
4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.281685 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:50Z","lastTransitionTime":"2025-12-05T01:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.286085 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.385910 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.385974 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.385988 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.386009 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.386027 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:50Z","lastTransitionTime":"2025-12-05T01:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.488400 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.488435 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.488445 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.488460 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.488489 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:50Z","lastTransitionTime":"2025-12-05T01:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.584072 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-mmcgt"] Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.584439 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-mmcgt" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.586364 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.586636 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.586995 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.587745 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.591165 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.591203 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.591215 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.591231 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.591242 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:50Z","lastTransitionTime":"2025-12-05T01:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.596954 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:10:50 crc kubenswrapper[4665]: E1205 01:10:50.597114 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:10:58.597092012 +0000 UTC m=+33.936484311 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.598066 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.601827 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.603003 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.607665 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.614684 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.631767 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb 
sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath
\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\
\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:
10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.642516 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.663605 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c3
03953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.679043 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.693316 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.693480 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.693515 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.693525 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.693544 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.693557 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:50Z","lastTransitionTime":"2025-12-05T01:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.697735 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.697968 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kshh4\" (UniqueName: \"kubernetes.io/projected/a4ff6abc-ff7f-425a-a60e-b8923b7294a5-kube-api-access-kshh4\") pod \"node-ca-mmcgt\" (UID: \"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\") " pod="openshift-image-registry/node-ca-mmcgt" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.697995 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.698019 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.698040 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.698069 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a4ff6abc-ff7f-425a-a60e-b8923b7294a5-host\") pod \"node-ca-mmcgt\" (UID: \"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\") " pod="openshift-image-registry/node-ca-mmcgt" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.698092 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a4ff6abc-ff7f-425a-a60e-b8923b7294a5-serviceca\") pod \"node-ca-mmcgt\" (UID: \"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\") " pod="openshift-image-registry/node-ca-mmcgt" Dec 05 01:10:50 crc kubenswrapper[4665]: E1205 01:10:50.698091 4665 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 01:10:50 crc kubenswrapper[4665]: E1205 01:10:50.698160 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-12-05 01:10:58.698144147 +0000 UTC m=+34.037536446 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 01:10:50 crc kubenswrapper[4665]: E1205 01:10:50.698177 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 01:10:50 crc kubenswrapper[4665]: E1205 01:10:50.698194 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 01:10:50 crc kubenswrapper[4665]: E1205 01:10:50.698225 4665 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:10:50 crc kubenswrapper[4665]: E1205 01:10:50.698259 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 01:10:58.698249559 +0000 UTC m=+34.037641858 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:10:50 crc kubenswrapper[4665]: E1205 01:10:50.698339 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 01:10:50 crc kubenswrapper[4665]: E1205 01:10:50.698352 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 01:10:50 crc kubenswrapper[4665]: E1205 01:10:50.698361 4665 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:10:50 crc kubenswrapper[4665]: E1205 01:10:50.698388 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 01:10:58.698379082 +0000 UTC m=+34.037771381 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:10:50 crc kubenswrapper[4665]: E1205 01:10:50.698449 4665 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 01:10:50 crc kubenswrapper[4665]: E1205 01:10:50.698481 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 01:10:58.698472014 +0000 UTC m=+34.037864313 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.709395 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.729242 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.744610 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.757825 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.772988 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.788032 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.795926 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.795953 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.795962 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.795977 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.795987 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:50Z","lastTransitionTime":"2025-12-05T01:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.799529 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kshh4\" (UniqueName: \"kubernetes.io/projected/a4ff6abc-ff7f-425a-a60e-b8923b7294a5-kube-api-access-kshh4\") pod \"node-ca-mmcgt\" (UID: \"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\") " pod="openshift-image-registry/node-ca-mmcgt" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.799605 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a4ff6abc-ff7f-425a-a60e-b8923b7294a5-host\") pod \"node-ca-mmcgt\" (UID: \"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\") " pod="openshift-image-registry/node-ca-mmcgt" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.799635 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a4ff6abc-ff7f-425a-a60e-b8923b7294a5-serviceca\") pod \"node-ca-mmcgt\" (UID: \"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\") " pod="openshift-image-registry/node-ca-mmcgt" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.799776 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a4ff6abc-ff7f-425a-a60e-b8923b7294a5-host\") pod \"node-ca-mmcgt\" (UID: \"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\") " pod="openshift-image-registry/node-ca-mmcgt" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.800831 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/a4ff6abc-ff7f-425a-a60e-b8923b7294a5-serviceca\") pod \"node-ca-mmcgt\" (UID: \"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\") " pod="openshift-image-registry/node-ca-mmcgt" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.801661 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.822961 4665 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-kshh4\" (UniqueName: \"kubernetes.io/projected/a4ff6abc-ff7f-425a-a60e-b8923b7294a5-kube-api-access-kshh4\") pod \"node-ca-mmcgt\" (UID: \"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\") " pod="openshift-image-registry/node-ca-mmcgt" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.826071 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be
30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c303953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensu
re-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.840142 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.852818 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.867687 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.881250 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.892725 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.892761 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.892801 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:10:50 crc kubenswrapper[4665]: E1205 01:10:50.892868 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:10:50 crc kubenswrapper[4665]: E1205 01:10:50.892957 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:10:50 crc kubenswrapper[4665]: E1205 01:10:50.893060 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.897595 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.897630 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.897641 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.897655 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.897670 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:50Z","lastTransitionTime":"2025-12-05T01:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.898546 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-mmcgt" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.903527 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: W1205 01:10:50.912507 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4ff6abc_ff7f_425a_a60e_b8923b7294a5.slice/crio-942f3e57067a088ba7f3fb6148641352b2f1c87e9e432d2a6be6ef57209c0fe9 WatchSource:0}: Error finding container 942f3e57067a088ba7f3fb6148641352b2f1c87e9e432d2a6be6ef57209c0fe9: Status 404 returned error can't find the container with id 942f3e57067a088ba7f3fb6148641352b2f1c87e9e432d2a6be6ef57209c0fe9 Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.921580 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.957682 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.999700 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.999745 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.999758 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:50 crc kubenswrapper[4665]: I1205 01:10:50.999787 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:50.999800 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:50Z","lastTransitionTime":"2025-12-05T01:10:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.004073 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:50Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.037391 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:51Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.055421 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podI
Ps\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:51Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.076139 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:51Z 
is after 2025-08-24T17:21:41Z" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.094111 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:51Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.101971 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.102010 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.102022 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.102039 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.102052 4665 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:51Z","lastTransitionTime":"2025-12-05T01:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.103334 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" event={"ID":"3391189b-95c4-4746-8aea-1d3be0b4ae1a","Type":"ContainerStarted","Data":"591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a"} Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.107581 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerStarted","Data":"bdee7e343d8c260064d8c46bead4a6546fa1608cb5a4dbaccd1962813621b77c"} Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.108378 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:51Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.108937 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-mmcgt" 
event={"ID":"a4ff6abc-ff7f-425a-a60e-b8923b7294a5","Type":"ContainerStarted","Data":"942f3e57067a088ba7f3fb6148641352b2f1c87e9e432d2a6be6ef57209c0fe9"} Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.117326 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:51Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.129941 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:51Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.142669 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-05T01:10:51Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.153362 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:51Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.161948 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:51Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.176008 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:51Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.186893 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:51Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.203835 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:51Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.204355 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.204379 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.204388 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.204400 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.204409 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:51Z","lastTransitionTime":"2025-12-05T01:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.223230 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd
/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c303953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\
"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:51Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.235997 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:51Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.248956 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:51Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.262013 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:51Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.278755 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-05T01:10:51Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.297458 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-
apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:51Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.306713 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.306765 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.306778 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.306798 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.306811 4665 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:51Z","lastTransitionTime":"2025-12-05T01:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.312601 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:51Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.326464 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:51Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.408893 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.408928 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.408938 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.408955 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.408967 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:51Z","lastTransitionTime":"2025-12-05T01:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.512598 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.512669 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.512683 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.512701 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.512712 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:51Z","lastTransitionTime":"2025-12-05T01:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.620055 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.620093 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.620101 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.620113 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.620121 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:51Z","lastTransitionTime":"2025-12-05T01:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.722365 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.722405 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.722415 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.722431 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.722441 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:51Z","lastTransitionTime":"2025-12-05T01:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.825807 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.825852 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.825860 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.825902 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.825914 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:51Z","lastTransitionTime":"2025-12-05T01:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.928767 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.928818 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.928830 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.928848 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:51 crc kubenswrapper[4665]: I1205 01:10:51.929185 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:51Z","lastTransitionTime":"2025-12-05T01:10:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.031067 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.031107 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.031118 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.031135 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.031146 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:52Z","lastTransitionTime":"2025-12-05T01:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.113367 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-mmcgt" event={"ID":"a4ff6abc-ff7f-425a-a60e-b8923b7294a5","Type":"ContainerStarted","Data":"0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03"} Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.113827 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.113866 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.113879 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.135044 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"n
ame\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.136785 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.136829 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.136840 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.136860 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.136874 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:52Z","lastTransitionTime":"2025-12-05T01:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.143065 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.147964 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.153493 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"na
me\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{
\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.173896 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.192444 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.206021 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.222096 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.233958 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.238826 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.238862 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.238871 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.238886 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.238900 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:52Z","lastTransitionTime":"2025-12-05T01:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.249079 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.261663 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.274105 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podI
Ps\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.287086 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.298967 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.315713 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\
",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log
-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdee7e343d8c260064d8c46bead4a6546fa1608cb5a4dbaccd1962813621b77c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\
\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.326165 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.341317 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.341354 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.341366 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.341383 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.341395 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:52Z","lastTransitionTime":"2025-12-05T01:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.345362 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c303953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.357491 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.371688 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.381653 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.403589 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdee7e343d8c260064d8c46bead4a6546fa1608cb5a4dbaccd1962813621b77c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPat
h\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.421809 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c3
03953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.436647 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.444733 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.444787 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.444799 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.444814 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.444825 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:52Z","lastTransitionTime":"2025-12-05T01:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.453395 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.466617 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.480754 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.495896 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-
apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.508020 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.519454 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.532392 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\
\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.544287 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.547089 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.547121 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.547133 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.547151 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.547164 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:52Z","lastTransitionTime":"2025-12-05T01:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.557038 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:52Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.649962 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.650002 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.650013 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.650027 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.650037 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:52Z","lastTransitionTime":"2025-12-05T01:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.751859 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.751928 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.751942 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.751961 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.751973 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:52Z","lastTransitionTime":"2025-12-05T01:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.854360 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.854399 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.854407 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.854421 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.854434 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:52Z","lastTransitionTime":"2025-12-05T01:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.892934 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.892966 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.893027 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:10:52 crc kubenswrapper[4665]: E1205 01:10:52.893087 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:10:52 crc kubenswrapper[4665]: E1205 01:10:52.893165 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:10:52 crc kubenswrapper[4665]: E1205 01:10:52.893229 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.956856 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.956895 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.956904 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.956923 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:52 crc kubenswrapper[4665]: I1205 01:10:52.956932 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:52Z","lastTransitionTime":"2025-12-05T01:10:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.059052 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.059355 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.059459 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.059594 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.059704 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:53Z","lastTransitionTime":"2025-12-05T01:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.162933 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.162969 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.162980 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.162994 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.163003 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:53Z","lastTransitionTime":"2025-12-05T01:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.265853 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.265895 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.265908 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.265924 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.265935 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:53Z","lastTransitionTime":"2025-12-05T01:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.367837 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.367868 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.367877 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.367891 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.367900 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:53Z","lastTransitionTime":"2025-12-05T01:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.470900 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.470944 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.470961 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.470982 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.470996 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:53Z","lastTransitionTime":"2025-12-05T01:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.572890 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.572925 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.572933 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.572947 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.572957 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:53Z","lastTransitionTime":"2025-12-05T01:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.674792 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.674833 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.674846 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.674862 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.674873 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:53Z","lastTransitionTime":"2025-12-05T01:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.776728 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.776761 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.776769 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.776782 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.776790 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:53Z","lastTransitionTime":"2025-12-05T01:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.879333 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.879373 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.879388 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.879408 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.879419 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:53Z","lastTransitionTime":"2025-12-05T01:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.983000 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.983287 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.983387 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.983455 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:53 crc kubenswrapper[4665]: I1205 01:10:53.983510 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:53Z","lastTransitionTime":"2025-12-05T01:10:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.085642 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.085674 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.085682 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.085696 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.085706 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:54Z","lastTransitionTime":"2025-12-05T01:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.122402 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bmn9_76af84f2-4935-4e7f-8fc6-b51adcfeebc4/ovnkube-controller/0.log" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.125630 4665 generic.go:334] "Generic (PLEG): container finished" podID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerID="bdee7e343d8c260064d8c46bead4a6546fa1608cb5a4dbaccd1962813621b77c" exitCode=1 Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.125669 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerDied","Data":"bdee7e343d8c260064d8c46bead4a6546fa1608cb5a4dbaccd1962813621b77c"} Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.126256 4665 scope.go:117] "RemoveContainer" containerID="bdee7e343d8c260064d8c46bead4a6546fa1608cb5a4dbaccd1962813621b77c" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.150216 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.161649 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.185205 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdee7e343d8c260064d8c46bead4a6546fa1608cb5a4dbaccd1962813621b77c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bdee7e343d8c260064d8c46bead4a6546fa1608cb5a4dbaccd1962813621b77c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:10:53Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 01:10:53.290217 5834 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 01:10:53.290247 5834 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 01:10:53.290254 5834 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 01:10:53.290267 5834 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 01:10:53.290349 5834 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 01:10:53.290361 5834 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 01:10:53.290408 5834 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 01:10:53.290427 5834 factory.go:656] Stopping watch factory\\\\nI1205 01:10:53.290438 5834 ovnkube.go:599] Stopped ovnkube\\\\nI1205 01:10:53.290463 5834 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 01:10:53.290477 5834 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 01:10:53.290484 5834 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 01:10:53.290492 5834 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 01:10:53.290499 5834 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 01:10:53.290505 5834 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 
0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209
9482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.189800 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.189827 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.189836 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.189851 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.189860 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:54Z","lastTransitionTime":"2025-12-05T01:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.226444 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.248178 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c3
03953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.262972 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostI
Ps\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"
reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473
a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.274904 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.287031 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.291965 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.292012 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.292029 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.292051 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.292066 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:54Z","lastTransitionTime":"2025-12-05T01:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.302066 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.315679 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.326150 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.337925 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.351436 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.363571 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.372965 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.394603 4665 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.394729 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.394798 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.394871 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.394934 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:54Z","lastTransitionTime":"2025-12-05T01:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.497417 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.497451 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.497462 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.497475 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.497486 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:54Z","lastTransitionTime":"2025-12-05T01:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.600117 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.600146 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.600154 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.600167 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.600177 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:54Z","lastTransitionTime":"2025-12-05T01:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.702436 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.702462 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.702471 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.702493 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.702503 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:54Z","lastTransitionTime":"2025-12-05T01:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.804941 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.804975 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.804986 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.805002 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.805015 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:54Z","lastTransitionTime":"2025-12-05T01:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.893712 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.893715 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:10:54 crc kubenswrapper[4665]: E1205 01:10:54.894078 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:10:54 crc kubenswrapper[4665]: E1205 01:10:54.894970 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.898535 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:10:54 crc kubenswrapper[4665]: E1205 01:10:54.898666 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.908097 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.908147 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.908157 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.908176 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.908188 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:54Z","lastTransitionTime":"2025-12-05T01:10:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.910875 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.925794 4665 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc
/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.940332 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.953258 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.965633 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.976075 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:54 crc kubenswrapper[4665]: I1205 01:10:54.987263 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.007324 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/ope
nshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.011103 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.011155 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.011168 4665 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.011193 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.011208 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:55Z","lastTransitionTime":"2025-12-05T01:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.021980 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.038624 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.055376 4665 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.073428 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.093977 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bdee7e343d8c260064d8c46bead4a6546fa1608cb5a4dbaccd1962813621b77c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bdee7e343d8c260064d8c46bead4a6546fa1608cb5a4dbaccd1962813621b77c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:10:53Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 01:10:53.290217 5834 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 01:10:53.290247 5834 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 01:10:53.290254 5834 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 01:10:53.290267 5834 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 01:10:53.290349 5834 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 01:10:53.290361 5834 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 01:10:53.290408 5834 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 01:10:53.290427 5834 factory.go:656] Stopping watch factory\\\\nI1205 01:10:53.290438 5834 ovnkube.go:599] Stopped ovnkube\\\\nI1205 01:10:53.290463 5834 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 01:10:53.290477 5834 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 01:10:53.290484 5834 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 01:10:53.290492 5834 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 01:10:53.290499 5834 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 01:10:53.290505 5834 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 
0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209
9482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.104604 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126
.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.113416 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.113455 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.113466 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.113481 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.113490 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:55Z","lastTransitionTime":"2025-12-05T01:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.122617 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c3
03953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.129709 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bmn9_76af84f2-4935-4e7f-8fc6-b51adcfeebc4/ovnkube-controller/0.log" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.132607 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerStarted","Data":"df2c51e6271c7348e7ba171fd408e4ea1018109b8bc18ce3f439cdc7b5cc0ba8"} Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.132911 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.146613 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.157343 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.169027 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.182061 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.196429 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.212161 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2c51e6271c7348e7ba171fd408e4ea1018109b8bc18ce3f439cdc7b5cc0ba8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bdee7e343d8c260064d8c46bead4a6546fa1608cb5a4dbaccd1962813621b77c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:10:53Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 01:10:53.290217 5834 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 01:10:53.290247 5834 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 01:10:53.290254 5834 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 01:10:53.290267 5834 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 01:10:53.290349 5834 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 01:10:53.290361 5834 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 01:10:53.290408 5834 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 01:10:53.290427 5834 factory.go:656] Stopping watch factory\\\\nI1205 01:10:53.290438 5834 ovnkube.go:599] Stopped ovnkube\\\\nI1205 01:10:53.290463 5834 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 01:10:53.290477 5834 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 01:10:53.290484 5834 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 01:10:53.290492 5834 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 01:10:53.290499 5834 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 01:10:53.290505 5834 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 
0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.215573 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.215605 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.215615 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.215631 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.215642 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:55Z","lastTransitionTime":"2025-12-05T01:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.223933 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.243984 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c3
03953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.258768 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostI
Ps\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"
reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473
a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.271464 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.284365 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.295458 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.306903 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.317687 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.317733 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.317744 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.317763 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.317778 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:55Z","lastTransitionTime":"2025-12-05T01:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.320320 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.331279 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.420685 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.420730 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.420750 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.420768 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.420781 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:55Z","lastTransitionTime":"2025-12-05T01:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.523499 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.523561 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.523580 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.523608 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.523626 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:55Z","lastTransitionTime":"2025-12-05T01:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.625796 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.625874 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.625897 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.625948 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.626047 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:55Z","lastTransitionTime":"2025-12-05T01:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.728370 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.728475 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.728491 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.728515 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.728531 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:55Z","lastTransitionTime":"2025-12-05T01:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.764701 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.764737 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.764749 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.764766 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.764776 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:55Z","lastTransitionTime":"2025-12-05T01:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:55 crc kubenswrapper[4665]: E1205 01:10:55.780986 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.784789 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.784837 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.784854 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.784877 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.784898 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:55Z","lastTransitionTime":"2025-12-05T01:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:55 crc kubenswrapper[4665]: E1205 01:10:55.801500 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.806945 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.806998 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.807020 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.807042 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.807058 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:55Z","lastTransitionTime":"2025-12-05T01:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:55 crc kubenswrapper[4665]: E1205 01:10:55.821013 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.824840 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.824872 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.824881 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.824910 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.824920 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:55Z","lastTransitionTime":"2025-12-05T01:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:55 crc kubenswrapper[4665]: E1205 01:10:55.837412 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.841205 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.841265 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.841280 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.841322 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.841341 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:55Z","lastTransitionTime":"2025-12-05T01:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:55 crc kubenswrapper[4665]: E1205 01:10:55.861855 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:55 crc kubenswrapper[4665]: E1205 01:10:55.862088 4665 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.864041 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.864071 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.864082 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.864096 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.864106 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:55Z","lastTransitionTime":"2025-12-05T01:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.966323 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.966370 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.966380 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.966400 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:55 crc kubenswrapper[4665]: I1205 01:10:55.966413 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:55Z","lastTransitionTime":"2025-12-05T01:10:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.068680 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.068738 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.068757 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.068781 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.068796 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:56Z","lastTransitionTime":"2025-12-05T01:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.137595 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bmn9_76af84f2-4935-4e7f-8fc6-b51adcfeebc4/ovnkube-controller/1.log" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.138394 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bmn9_76af84f2-4935-4e7f-8fc6-b51adcfeebc4/ovnkube-controller/0.log" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.141194 4665 generic.go:334] "Generic (PLEG): container finished" podID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerID="df2c51e6271c7348e7ba171fd408e4ea1018109b8bc18ce3f439cdc7b5cc0ba8" exitCode=1 Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.141256 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerDied","Data":"df2c51e6271c7348e7ba171fd408e4ea1018109b8bc18ce3f439cdc7b5cc0ba8"} Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.141359 4665 scope.go:117] "RemoveContainer" containerID="bdee7e343d8c260064d8c46bead4a6546fa1608cb5a4dbaccd1962813621b77c" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.141832 4665 scope.go:117] "RemoveContainer" containerID="df2c51e6271c7348e7ba171fd408e4ea1018109b8bc18ce3f439cdc7b5cc0ba8" Dec 05 01:10:56 crc kubenswrapper[4665]: E1205 01:10:56.142008 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2bmn9_openshift-ovn-kubernetes(76af84f2-4935-4e7f-8fc6-b51adcfeebc4)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.156212 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:56Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.168590 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-05T01:10:56Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.171237 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.171342 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.171367 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.171397 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.171425 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:56Z","lastTransitionTime":"2025-12-05T01:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.180700 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.
io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:56Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.192159 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:56Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.200080 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:56Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.229841 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2c51e6271c7348e7ba171fd408e4ea1018109b8bc18ce3f439cdc7b5cc0ba8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bdee7e343d8c260064d8c46bead4a6546fa1608cb5a4dbaccd1962813621b77c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:10:53Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 01:10:53.290217 5834 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 01:10:53.290247 5834 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 01:10:53.290254 5834 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 01:10:53.290267 5834 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 01:10:53.290349 5834 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 01:10:53.290361 5834 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 01:10:53.290408 5834 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 01:10:53.290427 5834 factory.go:656] Stopping watch factory\\\\nI1205 01:10:53.290438 5834 ovnkube.go:599] Stopped ovnkube\\\\nI1205 01:10:53.290463 5834 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 01:10:53.290477 5834 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 01:10:53.290484 5834 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 01:10:53.290492 5834 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 01:10:53.290499 5834 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 01:10:53.290505 5834 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1205 0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df2c51e6271c7348e7ba171fd408e4ea1018109b8bc18ce3f439cdc7b5cc0ba8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"d to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook 
\\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z]\\\\nI1205 01:10:55.016653 5990 services_controller.go:434] Service openshift-machine-config-operator/machine-config-controller retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-config-controller openshift-machine-config-operator aa30290d-3a39-43ba-a212-6439bd680987 4486 0 2025-02-23 05:12:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-controller] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mcc-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0073ff8bb \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Na\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-releas
e-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:56Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.257671 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:56Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.273640 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.273695 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.273707 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.273724 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.273751 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:56Z","lastTransitionTime":"2025-12-05T01:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.284931 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c303953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:56Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.298102 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:56Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.310205 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:56Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.321194 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:56Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.331241 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:56Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.340837 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:56Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.351082 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:56Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.363592 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\
",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:56Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.376242 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.376277 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.376289 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.376322 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.376339 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:56Z","lastTransitionTime":"2025-12-05T01:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.479626 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.479696 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.479716 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.479743 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.479761 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:56Z","lastTransitionTime":"2025-12-05T01:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.582021 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.582055 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.582066 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.582083 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.582094 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:56Z","lastTransitionTime":"2025-12-05T01:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.684842 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.684888 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.684904 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.684928 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.684948 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:56Z","lastTransitionTime":"2025-12-05T01:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.786852 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.786912 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.786932 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.786957 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.786974 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:56Z","lastTransitionTime":"2025-12-05T01:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.890266 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.890332 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.890346 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.890364 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.890375 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:56Z","lastTransitionTime":"2025-12-05T01:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.892498 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.892519 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.892498 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 01:10:56 crc kubenswrapper[4665]: E1205 01:10:56.892620 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 01:10:56 crc kubenswrapper[4665]: E1205 01:10:56.892675 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 01:10:56 crc kubenswrapper[4665]: E1205 01:10:56.892751 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.992715 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.992768 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.992779 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.992799 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:10:56 crc kubenswrapper[4665]: I1205 01:10:56.992812 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:56Z","lastTransitionTime":"2025-12-05T01:10:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.096087 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.096175 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.096193 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.096260 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.096335 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:57Z","lastTransitionTime":"2025-12-05T01:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.111540 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl"] Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.112250 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.115678 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.115698 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.136062 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},
{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c303953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01
:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.146865 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bmn9_76af84f2-4935-4e7f-8fc6-b51adcfeebc4/ovnkube-controller/1.log" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.150865 4665 scope.go:117] "RemoveContainer" containerID="df2c51e6271c7348e7ba171fd408e4ea1018109b8bc18ce3f439cdc7b5cc0ba8" Dec 05 01:10:57 crc kubenswrapper[4665]: E1205 01:10:57.151158 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2bmn9_openshift-ovn-kubernetes(76af84f2-4935-4e7f-8fc6-b51adcfeebc4)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.152742 4665 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.167619 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.181856 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.195851 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.198266 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.198333 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.198347 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.198366 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.198379 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:57Z","lastTransitionTime":"2025-12-05T01:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.209328 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.225133 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.237855 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d74
62\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.248920 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.261082 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.271422 4665 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2e233784-fe06-4bef-a30e-29d8dca2f91d-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-7xnrl\" (UID: \"2e233784-fe06-4bef-a30e-29d8dca2f91d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.271490 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tpjf\" (UniqueName: \"kubernetes.io/projected/2e233784-fe06-4bef-a30e-29d8dca2f91d-kube-api-access-8tpjf\") pod \"ovnkube-control-plane-749d76644c-7xnrl\" (UID: \"2e233784-fe06-4bef-a30e-29d8dca2f91d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.271512 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2e233784-fe06-4bef-a30e-29d8dca2f91d-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-7xnrl\" (UID: \"2e233784-fe06-4bef-a30e-29d8dca2f91d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.271533 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2e233784-fe06-4bef-a30e-29d8dca2f91d-env-overrides\") pod \"ovnkube-control-plane-749d76644c-7xnrl\" (UID: \"2e233784-fe06-4bef-a30e-29d8dca2f91d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.272776 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e233784-fe06-4bef-a30e-29d8dca2f91d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7xnrl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.286438 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.299880 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.300640 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.300682 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.300697 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.300714 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.300725 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:57Z","lastTransitionTime":"2025-12-05T01:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.310378 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"
}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.332689 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnku
be-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\
\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2c51e6271c7348e7ba171fd408e4ea1018109b8bc18ce3f439cdc7b5cc0ba8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bdee7e343d8c260064d8c46bead4a6546fa1608cb5a4dbaccd1962813621b77c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:10:53Z\\\",\\\"message\\\":\\\"e (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 01:10:53.290217 5834 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 01:10:53.290247 5834 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 01:10:53.290254 5834 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 01:10:53.290267 5834 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI1205 01:10:53.290349 5834 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI1205 01:10:53.290361 5834 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI1205 01:10:53.290408 5834 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 01:10:53.290427 5834 factory.go:656] Stopping watch factory\\\\nI1205 01:10:53.290438 5834 ovnkube.go:599] Stopped ovnkube\\\\nI1205 01:10:53.290463 5834 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1205 01:10:53.290477 5834 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 01:10:53.290484 5834 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 01:10:53.290492 5834 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1205 01:10:53.290499 5834 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1205 01:10:53.290505 5834 handler.go:208] Removed 
*v1.Namespace event handler 5\\\\nI1205 0\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df2c51e6271c7348e7ba171fd408e4ea1018109b8bc18ce3f439cdc7b5cc0ba8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"d to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z]\\\\nI1205 01:10:55.016653 5990 services_controller.go:434] Service openshift-machine-config-operator/machine-config-controller retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-config-controller openshift-machine-config-operator aa30290d-3a39-43ba-a212-6439bd680987 4486 0 2025-02-23 05:12:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-controller] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mcc-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0073ff8bb \\\\u003cnil\\\\u003e}] [] 
[]},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Na\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:54Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.342967 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\
\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.354274 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-contr
oller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.364542 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.373005 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tpjf\" (UniqueName: \"kubernetes.io/projected/2e233784-fe06-4bef-a30e-29d8dca2f91d-kube-api-access-8tpjf\") pod \"ovnkube-control-plane-749d76644c-7xnrl\" (UID: \"2e233784-fe06-4bef-a30e-29d8dca2f91d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.373049 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2e233784-fe06-4bef-a30e-29d8dca2f91d-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-7xnrl\" (UID: \"2e233784-fe06-4bef-a30e-29d8dca2f91d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.373076 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2e233784-fe06-4bef-a30e-29d8dca2f91d-env-overrides\") pod \"ovnkube-control-plane-749d76644c-7xnrl\" (UID: \"2e233784-fe06-4bef-a30e-29d8dca2f91d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.373154 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2e233784-fe06-4bef-a30e-29d8dca2f91d-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-7xnrl\" (UID: \"2e233784-fe06-4bef-a30e-29d8dca2f91d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.373822 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/2e233784-fe06-4bef-a30e-29d8dca2f91d-env-overrides\") pod \"ovnkube-control-plane-749d76644c-7xnrl\" (UID: \"2e233784-fe06-4bef-a30e-29d8dca2f91d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.374025 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/2e233784-fe06-4bef-a30e-29d8dca2f91d-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-7xnrl\" (UID: \"2e233784-fe06-4bef-a30e-29d8dca2f91d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.376134 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-
v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.379223 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/2e233784-fe06-4bef-a30e-29d8dca2f91d-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-7xnrl\" (UID: \"2e233784-fe06-4bef-a30e-29d8dca2f91d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.391521 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tpjf\" (UniqueName: \"kubernetes.io/projected/2e233784-fe06-4bef-a30e-29d8dca2f91d-kube-api-access-8tpjf\") pod \"ovnkube-control-plane-749d76644c-7xnrl\" (UID: \"2e233784-fe06-4bef-a30e-29d8dca2f91d\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.391642 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e233784-fe06-4bef-a30e-29d8dca2f91d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7xnrl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.403594 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.403635 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.403647 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.403663 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.403674 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:57Z","lastTransitionTime":"2025-12-05T01:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.407746 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.417682 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.428130 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.433674 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453
265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2c51e6271c7348e7ba171fd408e4ea1018109b8bc18ce3f439cdc7b5cc0ba8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df2c51e6271c7348e7ba171fd408e4ea1018109b8bc18ce3f439cdc7b5cc0ba8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"d to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z]\\\\nI1205 01:10:55.016653 5990 services_controller.go:434] Service openshift-machine-config-operator/machine-config-controller retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-config-controller openshift-machine-config-operator aa30290d-3a39-43ba-a212-6439bd680987 4486 0 2025-02-23 05:12:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-controller] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mcc-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0073ff8bb \\\\u003cnil\\\\u003e}] [] 
[]},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Na\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2bmn9_openshift-ovn-kubernetes(76af84f2-4935-4e7f-8fc6-b51adcfeebc4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-
access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.446309 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: W1205 01:10:57.447731 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2e233784_fe06_4bef_a30e_29d8dca2f91d.slice/crio-dbe34b52dc9bbdcfb64c75a91d96fc0b3466e553de49f4113fa34e290bd7698f WatchSource:0}: Error finding container dbe34b52dc9bbdcfb64c75a91d96fc0b3466e553de49f4113fa34e290bd7698f: Status 404 returned error can't find the container with id dbe34b52dc9bbdcfb64c75a91d96fc0b3466e553de49f4113fa34e290bd7698f Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.466699 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c3
03953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.484795 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostI
Ps\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"
reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473
a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.502787 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get 
\\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.506138 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.506463 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.506559 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.506634 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.506695 4665 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:57Z","lastTransitionTime":"2025-12-05T01:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.515615 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.529991 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.543225 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.556311 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.567904 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.570903 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io
\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.582349 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.592620 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.603473 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.611105 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.611128 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.611137 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.611151 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.611160 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:57Z","lastTransitionTime":"2025-12-05T01:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.615732 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e233784-fe06-4bef-a30e-29d8dca2f91d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7xnrl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.626872 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.637580 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.655277 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2c51e6271c7348e7ba171fd408e4ea1018109b8bc18ce3f439cdc7b5cc0ba8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df2c51e6271c7348e7ba171fd408e4ea1018109b8bc18ce3f439cdc7b5cc0ba8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"d to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z]\\\\nI1205 01:10:55.016653 5990 services_controller.go:434] Service openshift-machine-config-operator/machine-config-controller retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-config-controller openshift-machine-config-operator aa30290d-3a39-43ba-a212-6439bd680987 4486 0 2025-02-23 05:12:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-controller] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mcc-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0073ff8bb \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Na\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2bmn9_openshift-ovn-kubernetes(76af84f2-4935-4e7f-8fc6-b51adcfeebc4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.666389 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.683876 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"co
ntainerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c303953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb3
24b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.697013 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.709258 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.713562 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.713604 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.713615 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.713634 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.713645 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:57Z","lastTransitionTime":"2025-12-05T01:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.720957 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.737677 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.751253 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.763821 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.779996 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.816219 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.816264 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.816273 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.816288 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.816484 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:57Z","lastTransitionTime":"2025-12-05T01:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.849648 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-xhbdk"] Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.850148 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:10:57 crc kubenswrapper[4665]: E1205 01:10:57.850204 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.866655 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297
254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.885221 4665 
status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498
c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.898326 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.913129 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.918756 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.918794 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.918805 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.918819 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.918837 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:57Z","lastTransitionTime":"2025-12-05T01:10:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.926825 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.945547 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.967082 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.979972 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs\") pod \"network-metrics-daemon-xhbdk\" (UID: \"e1e639aa-4bf7-4baa-a332-62dffec786d8\") " pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.980022 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjznw\" (UniqueName: \"kubernetes.io/projected/e1e639aa-4bf7-4baa-a332-62dffec786d8-kube-api-access-rjznw\") pod \"network-metrics-daemon-xhbdk\" (UID: \"e1e639aa-4bf7-4baa-a332-62dffec786d8\") " pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.985011 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":
{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:57 crc kubenswrapper[4665]: I1205 01:10:57.997996 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.010655 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:58Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.021486 4665 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.021624 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.021690 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.021756 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.021820 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:58Z","lastTransitionTime":"2025-12-05T01:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.023230 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e233784-fe06-4bef-a30e-29d8dca2f91d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7xnrl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:58Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.035421 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:58Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.044846 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:58Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.061914 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2c51e6271c7348e7ba171fd408e4ea1018109b8bc18ce3f439cdc7b5cc0ba8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df2c51e6271c7348e7ba171fd408e4ea1018109b8bc18ce3f439cdc7b5cc0ba8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"d to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z]\\\\nI1205 01:10:55.016653 5990 services_controller.go:434] Service openshift-machine-config-operator/machine-config-controller retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-config-controller openshift-machine-config-operator aa30290d-3a39-43ba-a212-6439bd680987 4486 0 2025-02-23 05:12:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-controller] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mcc-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0073ff8bb \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Na\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2bmn9_openshift-ovn-kubernetes(76af84f2-4935-4e7f-8fc6-b51adcfeebc4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:58Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.073397 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:58Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.080845 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs\") pod \"network-metrics-daemon-xhbdk\" (UID: \"e1e639aa-4bf7-4baa-a332-62dffec786d8\") " pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.081010 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjznw\" (UniqueName: \"kubernetes.io/projected/e1e639aa-4bf7-4baa-a332-62dffec786d8-kube-api-access-rjznw\") pod \"network-metrics-daemon-xhbdk\" (UID: \"e1e639aa-4bf7-4baa-a332-62dffec786d8\") " pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:10:58 crc kubenswrapper[4665]: E1205 01:10:58.081033 4665 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 01:10:58 crc kubenswrapper[4665]: E1205 01:10:58.081347 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs podName:e1e639aa-4bf7-4baa-a332-62dffec786d8 nodeName:}" failed. No retries permitted until 2025-12-05 01:10:58.58132061 +0000 UTC m=+33.920712929 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs") pod "network-metrics-daemon-xhbdk" (UID: "e1e639aa-4bf7-4baa-a332-62dffec786d8") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.082846 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xhbdk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1e639aa-4bf7-4baa-a332-62dffec786d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xhbdk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:58Z is after 
2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.098181 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjznw\" (UniqueName: \"kubernetes.io/projected/e1e639aa-4bf7-4baa-a332-62dffec786d8-kube-api-access-rjznw\") pod \"network-metrics-daemon-xhbdk\" (UID: \"e1e639aa-4bf7-4baa-a332-62dffec786d8\") " pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.101185 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a
93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c303953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/
ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:58Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.123713 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.123746 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.123754 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.123769 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.123778 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:58Z","lastTransitionTime":"2025-12-05T01:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.155807 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" event={"ID":"2e233784-fe06-4bef-a30e-29d8dca2f91d","Type":"ContainerStarted","Data":"944b0c438e2c4d4babd4e351233dd36f74a2e322de0895ac47454bfb37d137dd"} Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.155847 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" event={"ID":"2e233784-fe06-4bef-a30e-29d8dca2f91d","Type":"ContainerStarted","Data":"9aafbd36430c36f8d23c7ea72a4b97080a503c7f79c113d1861ed80c2140c2e6"} Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.155858 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" event={"ID":"2e233784-fe06-4bef-a30e-29d8dca2f91d","Type":"ContainerStarted","Data":"dbe34b52dc9bbdcfb64c75a91d96fc0b3466e553de49f4113fa34e290bd7698f"} Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.173632 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:58Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.196632 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2c51e6271c7348e7ba171fd408e4ea1018109b8bc18ce3f439cdc7b5cc0ba8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df2c51e6271c7348e7ba171fd408e4ea1018109b8bc18ce3f439cdc7b5cc0ba8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"d to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z]\\\\nI1205 01:10:55.016653 5990 services_controller.go:434] Service openshift-machine-config-operator/machine-config-controller retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-config-controller openshift-machine-config-operator aa30290d-3a39-43ba-a212-6439bd680987 4486 0 2025-02-23 05:12:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-controller] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mcc-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0073ff8bb \\\\u003cnil\\\\u003e}] [] 
[]},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Na\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2bmn9_openshift-ovn-kubernetes(76af84f2-4935-4e7f-8fc6-b51adcfeebc4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-
access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:58Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.212998 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:58Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.224016 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xhbdk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1e639aa-4bf7-4baa-a332-62dffec786d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xhbdk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:58Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.227405 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.227457 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.227476 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.227498 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.227518 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:58Z","lastTransitionTime":"2025-12-05T01:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.240561 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:58Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.260400 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c3
03953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:58Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.276009 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:58Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.288060 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:58Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.303668 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:58Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.316204 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:58Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.329961 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.330013 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.330029 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.330044 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.330053 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:58Z","lastTransitionTime":"2025-12-05T01:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.331720 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:58Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.346605 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:58Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.361432 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:58Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.375700 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:58Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.405426 4665 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e233784-fe06-4bef-a30e-29d8dca2f91d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9aafbd36430c36f8d23c7ea72a4b97080a503c7f79c113d1861ed80c2140c2e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944b0c438e2c4d4babd4e351233dd36f74a2e322de0895ac47454bfb37d137dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7xnrl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:58Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.432329 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.432372 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.432386 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.432406 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.432419 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:58Z","lastTransitionTime":"2025-12-05T01:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.444248 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220
d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:58Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.483104 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:58Z is after 2025-08-24T17:21:41Z" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.534925 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.534983 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.534992 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.535008 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.535017 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:58Z","lastTransitionTime":"2025-12-05T01:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.585580 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs\") pod \"network-metrics-daemon-xhbdk\" (UID: \"e1e639aa-4bf7-4baa-a332-62dffec786d8\") " pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:10:58 crc kubenswrapper[4665]: E1205 01:10:58.585780 4665 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 01:10:58 crc kubenswrapper[4665]: E1205 01:10:58.585864 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs podName:e1e639aa-4bf7-4baa-a332-62dffec786d8 nodeName:}" failed. No retries permitted until 2025-12-05 01:10:59.585845549 +0000 UTC m=+34.925237848 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs") pod "network-metrics-daemon-xhbdk" (UID: "e1e639aa-4bf7-4baa-a332-62dffec786d8") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.637423 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.637461 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.637472 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.637486 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.637496 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:58Z","lastTransitionTime":"2025-12-05T01:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.686879 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:10:58 crc kubenswrapper[4665]: E1205 01:10:58.687086 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:11:14.687057788 +0000 UTC m=+50.026450077 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.740084 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.740141 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.740150 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.740166 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.740176 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:58Z","lastTransitionTime":"2025-12-05T01:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.788045 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.788113 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.788198 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.788233 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:10:58 crc kubenswrapper[4665]: E1205 01:10:58.788250 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 01:10:58 crc kubenswrapper[4665]: E1205 01:10:58.788281 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 01:10:58 crc kubenswrapper[4665]: E1205 01:10:58.788308 4665 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:10:58 crc kubenswrapper[4665]: E1205 01:10:58.788359 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 01:11:14.788343268 +0000 UTC m=+50.127735567 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:10:58 crc kubenswrapper[4665]: E1205 01:10:58.788385 4665 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 01:10:58 crc kubenswrapper[4665]: E1205 01:10:58.788429 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 01:10:58 crc kubenswrapper[4665]: E1205 01:10:58.788458 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 01:10:58 crc kubenswrapper[4665]: E1205 01:10:58.788474 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 01:11:14.788452871 +0000 UTC m=+50.127845280 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 01:10:58 crc kubenswrapper[4665]: E1205 01:10:58.788478 4665 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:10:58 crc kubenswrapper[4665]: E1205 01:10:58.788520 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 01:11:14.788513382 +0000 UTC m=+50.127905801 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:10:58 crc kubenswrapper[4665]: E1205 01:10:58.788521 4665 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 01:10:58 crc kubenswrapper[4665]: E1205 01:10:58.788590 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 01:11:14.788566054 +0000 UTC m=+50.127958433 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.863079 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.863126 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.863137 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.863157 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.863168 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:58Z","lastTransitionTime":"2025-12-05T01:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.894613 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:10:58 crc kubenswrapper[4665]: E1205 01:10:58.894716 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.895008 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:10:58 crc kubenswrapper[4665]: E1205 01:10:58.895060 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.895097 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:10:58 crc kubenswrapper[4665]: E1205 01:10:58.895135 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.965323 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.965364 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.965375 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.965393 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:58 crc kubenswrapper[4665]: I1205 01:10:58.965404 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:58Z","lastTransitionTime":"2025-12-05T01:10:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.067481 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.067944 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.068007 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.068068 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.068125 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:59Z","lastTransitionTime":"2025-12-05T01:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.170878 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.170922 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.170932 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.170955 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.170967 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:59Z","lastTransitionTime":"2025-12-05T01:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.274146 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.274183 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.274192 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.274207 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.274217 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:59Z","lastTransitionTime":"2025-12-05T01:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.376774 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.376814 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.376823 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.376838 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.376847 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:59Z","lastTransitionTime":"2025-12-05T01:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.478598 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.478650 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.478662 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.478677 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.478687 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:59Z","lastTransitionTime":"2025-12-05T01:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.580524 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.580567 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.580580 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.580596 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.580606 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:59Z","lastTransitionTime":"2025-12-05T01:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.597096 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs\") pod \"network-metrics-daemon-xhbdk\" (UID: \"e1e639aa-4bf7-4baa-a332-62dffec786d8\") " pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:10:59 crc kubenswrapper[4665]: E1205 01:10:59.597234 4665 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 01:10:59 crc kubenswrapper[4665]: E1205 01:10:59.597312 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs podName:e1e639aa-4bf7-4baa-a332-62dffec786d8 nodeName:}" failed. No retries permitted until 2025-12-05 01:11:01.597275274 +0000 UTC m=+36.936667573 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs") pod "network-metrics-daemon-xhbdk" (UID: "e1e639aa-4bf7-4baa-a332-62dffec786d8") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.683218 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.683276 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.683325 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.683355 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.683372 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:59Z","lastTransitionTime":"2025-12-05T01:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.785916 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.785979 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.786001 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.786044 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.786061 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:59Z","lastTransitionTime":"2025-12-05T01:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.888688 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.888759 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.888784 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.888815 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.888838 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:59Z","lastTransitionTime":"2025-12-05T01:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.892915 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:10:59 crc kubenswrapper[4665]: E1205 01:10:59.893192 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.990807 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.990876 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.990899 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.990930 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:10:59 crc kubenswrapper[4665]: I1205 01:10:59.990952 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:10:59Z","lastTransitionTime":"2025-12-05T01:10:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.094955 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.095011 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.095024 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.095045 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.095063 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:00Z","lastTransitionTime":"2025-12-05T01:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.197920 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.197964 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.198212 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.198244 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.198261 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:00Z","lastTransitionTime":"2025-12-05T01:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.301202 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.301253 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.301269 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.301291 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.301345 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:00Z","lastTransitionTime":"2025-12-05T01:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.404025 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.404088 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.404110 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.404126 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.404138 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:00Z","lastTransitionTime":"2025-12-05T01:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.506022 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.506061 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.506075 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.506092 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.506103 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:00Z","lastTransitionTime":"2025-12-05T01:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.608366 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.608768 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.608935 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.609078 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.609222 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:00Z","lastTransitionTime":"2025-12-05T01:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.711838 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.711875 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.711885 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.711903 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.711917 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:00Z","lastTransitionTime":"2025-12-05T01:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.815340 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.815650 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.815731 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.815816 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.815931 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:00Z","lastTransitionTime":"2025-12-05T01:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.892760 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:00 crc kubenswrapper[4665]: E1205 01:11:00.893167 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.892852 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:00 crc kubenswrapper[4665]: E1205 01:11:00.893472 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.892824 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:00 crc kubenswrapper[4665]: E1205 01:11:00.893731 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.919004 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.919267 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.919494 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.919759 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:00 crc kubenswrapper[4665]: I1205 01:11:00.919891 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:00Z","lastTransitionTime":"2025-12-05T01:11:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.022446 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.022483 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.022495 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.022511 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.022524 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:01Z","lastTransitionTime":"2025-12-05T01:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.125571 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.126156 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.126236 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.126339 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.126419 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:01Z","lastTransitionTime":"2025-12-05T01:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.229410 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.229454 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.229466 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.229484 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.229503 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:01Z","lastTransitionTime":"2025-12-05T01:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.331833 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.331859 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.331886 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.331906 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.331916 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:01Z","lastTransitionTime":"2025-12-05T01:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.434482 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.434513 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.434521 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.434534 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.434543 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:01Z","lastTransitionTime":"2025-12-05T01:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.536749 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.537001 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.537072 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.537134 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.537229 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:01Z","lastTransitionTime":"2025-12-05T01:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.616496 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs\") pod \"network-metrics-daemon-xhbdk\" (UID: \"e1e639aa-4bf7-4baa-a332-62dffec786d8\") " pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:01 crc kubenswrapper[4665]: E1205 01:11:01.616688 4665 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 01:11:01 crc kubenswrapper[4665]: E1205 01:11:01.616815 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs podName:e1e639aa-4bf7-4baa-a332-62dffec786d8 nodeName:}" failed. No retries permitted until 2025-12-05 01:11:05.616788352 +0000 UTC m=+40.956180681 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs") pod "network-metrics-daemon-xhbdk" (UID: "e1e639aa-4bf7-4baa-a332-62dffec786d8") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.640789 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.641053 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.641161 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.641256 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.641368 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:01Z","lastTransitionTime":"2025-12-05T01:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.744389 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.744430 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.744439 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.744453 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.744462 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:01Z","lastTransitionTime":"2025-12-05T01:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.846934 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.846969 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.846977 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.846990 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.847000 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:01Z","lastTransitionTime":"2025-12-05T01:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.892602 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:01 crc kubenswrapper[4665]: E1205 01:11:01.892741 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.949978 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.950035 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.950044 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.950059 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:01 crc kubenswrapper[4665]: I1205 01:11:01.950070 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:01Z","lastTransitionTime":"2025-12-05T01:11:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.052433 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.052477 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.052490 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.052505 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.052516 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:02Z","lastTransitionTime":"2025-12-05T01:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.155708 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.155742 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.155750 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.155765 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.155774 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:02Z","lastTransitionTime":"2025-12-05T01:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.258745 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.258826 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.258837 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.258867 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.258883 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:02Z","lastTransitionTime":"2025-12-05T01:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.362582 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.362943 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.363114 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.363281 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.363464 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:02Z","lastTransitionTime":"2025-12-05T01:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.465690 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.465720 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.465729 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.465742 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.465751 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:02Z","lastTransitionTime":"2025-12-05T01:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.567859 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.568252 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.568341 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.568451 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.568535 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:02Z","lastTransitionTime":"2025-12-05T01:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.671553 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.671850 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.672041 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.672210 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.672444 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:02Z","lastTransitionTime":"2025-12-05T01:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.775319 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.775365 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.775376 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.775391 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.775403 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:02Z","lastTransitionTime":"2025-12-05T01:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.877150 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.877188 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.877197 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.877213 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.877227 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:02Z","lastTransitionTime":"2025-12-05T01:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.893474 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.893684 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:02 crc kubenswrapper[4665]: E1205 01:11:02.893895 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:11:02 crc kubenswrapper[4665]: E1205 01:11:02.894401 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.894573 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:02 crc kubenswrapper[4665]: E1205 01:11:02.894749 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.980166 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.980195 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.980202 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.980214 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:02 crc kubenswrapper[4665]: I1205 01:11:02.980222 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:02Z","lastTransitionTime":"2025-12-05T01:11:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.082491 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.082876 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.083062 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.083393 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.083786 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:03Z","lastTransitionTime":"2025-12-05T01:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.185794 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.185849 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.185864 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.185886 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.185901 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:03Z","lastTransitionTime":"2025-12-05T01:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.288417 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.288444 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.288454 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.288489 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.288500 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:03Z","lastTransitionTime":"2025-12-05T01:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.390971 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.391041 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.391058 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.391081 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.391097 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:03Z","lastTransitionTime":"2025-12-05T01:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.493143 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.493187 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.493197 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.493213 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.493225 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:03Z","lastTransitionTime":"2025-12-05T01:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.595451 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.595518 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.595537 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.595563 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.595583 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:03Z","lastTransitionTime":"2025-12-05T01:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.697637 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.697679 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.697691 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.697708 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.697720 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:03Z","lastTransitionTime":"2025-12-05T01:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.800592 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.800670 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.800679 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.800701 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.800710 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:03Z","lastTransitionTime":"2025-12-05T01:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.893233 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:03 crc kubenswrapper[4665]: E1205 01:11:03.893400 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.903237 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.903275 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.903322 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.903346 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:03 crc kubenswrapper[4665]: I1205 01:11:03.903358 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:03Z","lastTransitionTime":"2025-12-05T01:11:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.005281 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.005335 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.005345 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.005366 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.005376 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:04Z","lastTransitionTime":"2025-12-05T01:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.107943 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.107972 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.107980 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.107993 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.108002 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:04Z","lastTransitionTime":"2025-12-05T01:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.209887 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.209928 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.209939 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.209955 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.209965 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:04Z","lastTransitionTime":"2025-12-05T01:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.312677 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.312753 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.312767 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.312810 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.312823 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:04Z","lastTransitionTime":"2025-12-05T01:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.414820 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.414863 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.414874 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.414891 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.414903 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:04Z","lastTransitionTime":"2025-12-05T01:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.516992 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.517024 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.517032 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.517045 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.517057 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:04Z","lastTransitionTime":"2025-12-05T01:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.619425 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.619465 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.619474 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.619488 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.619498 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:04Z","lastTransitionTime":"2025-12-05T01:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.721352 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.721391 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.721400 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.721414 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.721424 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:04Z","lastTransitionTime":"2025-12-05T01:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.880725 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.880766 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.880774 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.880788 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.880797 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:04Z","lastTransitionTime":"2025-12-05T01:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.892944 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:04 crc kubenswrapper[4665]: E1205 01:11:04.893066 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.893614 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:04 crc kubenswrapper[4665]: E1205 01:11:04.893682 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.893767 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:04 crc kubenswrapper[4665]: E1205 01:11:04.893821 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.916635 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\
\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:04Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.930352 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:04Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.940483 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:04Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.951646 4665 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e233784-fe06-4bef-a30e-29d8dca2f91d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9aafbd36430c36f8d23c7ea72a4b97080a503c7f79c113d1861ed80c2140c2e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944b0c438e2c4d4babd4e351233dd36f74a2e322de0895ac47454bfb37d137dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7xnrl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:04Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.963893 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:04Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.973849 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:04Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.982981 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.983137 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.983227 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.983325 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.983414 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:04Z","lastTransitionTime":"2025-12-05T01:11:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:04 crc kubenswrapper[4665]: I1205 01:11:04.994755 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df2c51e6271c7348e7ba171fd408e4ea1018109b8bc18ce3f439cdc7b5cc0ba8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df2c51e6271c7348e7ba171fd408e4ea1018109b8bc18ce3f439cdc7b5cc0ba8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"d to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z]\\\\nI1205 01:10:55.016653 5990 services_controller.go:434] Service openshift-machine-config-operator/machine-config-controller retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-config-controller openshift-machine-config-operator aa30290d-3a39-43ba-a212-6439bd680987 4486 0 2025-02-23 05:12:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-controller] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mcc-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0073ff8bb \\\\u003cnil\\\\u003e}] [] 
[]},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Na\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-2bmn9_openshift-ovn-kubernetes(76af84f2-4935-4e7f-8fc6-b51adcfeebc4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-
access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:04Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.009256 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:05Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.023609 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xhbdk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1e639aa-4bf7-4baa-a332-62dffec786d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xhbdk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:05Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.044917 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c3
03953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:05Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.059247 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/mul
tus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:05Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.076615 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:05Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.085722 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.085754 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:05 crc 
kubenswrapper[4665]: I1205 01:11:05.085763 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.085777 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.085787 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:05Z","lastTransitionTime":"2025-12-05T01:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.092443 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 
cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:05Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.108141 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:05Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.119529 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:05Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.137180 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:05Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.181756 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:05Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.188661 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.188703 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.188711 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.188724 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.188733 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:05Z","lastTransitionTime":"2025-12-05T01:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.291059 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.291102 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.291110 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.291124 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.291134 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:05Z","lastTransitionTime":"2025-12-05T01:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.394439 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.394486 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.394497 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.394517 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.394529 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:05Z","lastTransitionTime":"2025-12-05T01:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.496637 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.496676 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.496687 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.496706 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.496718 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:05Z","lastTransitionTime":"2025-12-05T01:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.598952 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.598991 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.599003 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.599019 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.599054 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:05Z","lastTransitionTime":"2025-12-05T01:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.685998 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs\") pod \"network-metrics-daemon-xhbdk\" (UID: \"e1e639aa-4bf7-4baa-a332-62dffec786d8\") " pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:05 crc kubenswrapper[4665]: E1205 01:11:05.686148 4665 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 01:11:05 crc kubenswrapper[4665]: E1205 01:11:05.686240 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs podName:e1e639aa-4bf7-4baa-a332-62dffec786d8 nodeName:}" failed. No retries permitted until 2025-12-05 01:11:13.686219276 +0000 UTC m=+49.025611565 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs") pod "network-metrics-daemon-xhbdk" (UID: "e1e639aa-4bf7-4baa-a332-62dffec786d8") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.701110 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.701150 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.701163 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.701180 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.701193 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:05Z","lastTransitionTime":"2025-12-05T01:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.803640 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.803680 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.803697 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.803714 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.803725 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:05Z","lastTransitionTime":"2025-12-05T01:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.892821 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:05 crc kubenswrapper[4665]: E1205 01:11:05.892963 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.906456 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.906527 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.906549 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.906579 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:05 crc kubenswrapper[4665]: I1205 01:11:05.906604 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:05Z","lastTransitionTime":"2025-12-05T01:11:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.055734 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.055764 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.055772 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.055784 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.055801 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:06Z","lastTransitionTime":"2025-12-05T01:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.157951 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.157998 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.158013 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.158034 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.158051 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:06Z","lastTransitionTime":"2025-12-05T01:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.187814 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.187851 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.187861 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.187875 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.187886 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:06Z","lastTransitionTime":"2025-12-05T01:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:06 crc kubenswrapper[4665]: E1205 01:11:06.199826 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:06Z is after 
2025-08-24T17:21:41Z" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.202927 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.202994 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.203013 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.203032 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.203070 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:06Z","lastTransitionTime":"2025-12-05T01:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:06 crc kubenswrapper[4665]: E1205 01:11:06.215994 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:06Z is after 
2025-08-24T17:21:41Z" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.219469 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.219503 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.219512 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.219529 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.219541 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:06Z","lastTransitionTime":"2025-12-05T01:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:06 crc kubenswrapper[4665]: E1205 01:11:06.231316 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:06Z is after 
2025-08-24T17:21:41Z" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.235156 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.235197 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.235209 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.235225 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.235236 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:06Z","lastTransitionTime":"2025-12-05T01:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:06 crc kubenswrapper[4665]: E1205 01:11:06.251982 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:06Z is after 
2025-08-24T17:21:41Z" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.256228 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.256267 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.256279 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.256396 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.256411 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:06Z","lastTransitionTime":"2025-12-05T01:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:06 crc kubenswrapper[4665]: E1205 01:11:06.271578 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:06Z is after 
2025-08-24T17:21:41Z" Dec 05 01:11:06 crc kubenswrapper[4665]: E1205 01:11:06.271702 4665 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.273325 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.273386 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.273406 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.273428 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.273445 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:06Z","lastTransitionTime":"2025-12-05T01:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.375238 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.375273 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.375281 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.375325 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.375338 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:06Z","lastTransitionTime":"2025-12-05T01:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.477999 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.478044 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.478055 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.478071 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.478082 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:06Z","lastTransitionTime":"2025-12-05T01:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.579950 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.579997 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.580008 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.580059 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.580081 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:06Z","lastTransitionTime":"2025-12-05T01:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.682688 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.682719 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.682732 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.682752 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.682765 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:06Z","lastTransitionTime":"2025-12-05T01:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.785048 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.785099 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.785110 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.785125 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.785137 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:06Z","lastTransitionTime":"2025-12-05T01:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.888417 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.888455 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.888467 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.888483 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.888494 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:06Z","lastTransitionTime":"2025-12-05T01:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.893524 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.893536 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:06 crc kubenswrapper[4665]: E1205 01:11:06.893627 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.893731 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:06 crc kubenswrapper[4665]: E1205 01:11:06.893836 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:11:06 crc kubenswrapper[4665]: E1205 01:11:06.893958 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.991322 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.991361 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.991371 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.991388 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:06 crc kubenswrapper[4665]: I1205 01:11:06.991401 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:06Z","lastTransitionTime":"2025-12-05T01:11:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.093898 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.093955 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.093995 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.094015 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.094030 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:07Z","lastTransitionTime":"2025-12-05T01:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.195879 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.195932 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.195948 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.195972 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.195989 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:07Z","lastTransitionTime":"2025-12-05T01:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.298763 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.298809 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.298823 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.298843 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.298855 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:07Z","lastTransitionTime":"2025-12-05T01:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.400716 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.400758 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.400770 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.400787 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.400800 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:07Z","lastTransitionTime":"2025-12-05T01:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.503209 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.503246 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.503255 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.503272 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.503284 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:07Z","lastTransitionTime":"2025-12-05T01:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.605448 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.605509 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.605517 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.605531 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.605540 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:07Z","lastTransitionTime":"2025-12-05T01:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.707891 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.707932 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.707943 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.707958 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.707969 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:07Z","lastTransitionTime":"2025-12-05T01:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.810655 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.810708 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.810718 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.810733 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.810744 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:07Z","lastTransitionTime":"2025-12-05T01:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.893198 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:07 crc kubenswrapper[4665]: E1205 01:11:07.893606 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.893901 4665 scope.go:117] "RemoveContainer" containerID="df2c51e6271c7348e7ba171fd408e4ea1018109b8bc18ce3f439cdc7b5cc0ba8" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.913869 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.913902 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.913910 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.913923 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:07 crc kubenswrapper[4665]: I1205 01:11:07.913933 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:07Z","lastTransitionTime":"2025-12-05T01:11:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.021026 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.021378 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.021389 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.021405 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.021417 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:08Z","lastTransitionTime":"2025-12-05T01:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.124492 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.124553 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.124565 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.124581 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.124592 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:08Z","lastTransitionTime":"2025-12-05T01:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.226665 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.226772 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.226796 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.226827 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.226849 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:08Z","lastTransitionTime":"2025-12-05T01:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.330157 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.330202 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.330214 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.330231 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.330242 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:08Z","lastTransitionTime":"2025-12-05T01:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.432375 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.432412 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.432421 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.432435 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.432485 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:08Z","lastTransitionTime":"2025-12-05T01:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.541142 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.541184 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.541195 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.541212 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.541222 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:08Z","lastTransitionTime":"2025-12-05T01:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.643118 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.643157 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.643168 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.643183 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.643192 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:08Z","lastTransitionTime":"2025-12-05T01:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.745559 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.745601 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.745613 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.745629 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.745640 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:08Z","lastTransitionTime":"2025-12-05T01:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.848219 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.848246 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.848254 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.848267 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.848276 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:08Z","lastTransitionTime":"2025-12-05T01:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.895060 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:08 crc kubenswrapper[4665]: E1205 01:11:08.895159 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.895321 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:08 crc kubenswrapper[4665]: E1205 01:11:08.895366 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.895582 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:08 crc kubenswrapper[4665]: E1205 01:11:08.895632 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.950714 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.950781 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.950793 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.950811 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:08 crc kubenswrapper[4665]: I1205 01:11:08.950822 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:08Z","lastTransitionTime":"2025-12-05T01:11:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.052930 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.052961 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.052969 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.052982 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.052990 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:09Z","lastTransitionTime":"2025-12-05T01:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.155230 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.155267 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.155278 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.155305 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.155314 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:09Z","lastTransitionTime":"2025-12-05T01:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.192528 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bmn9_76af84f2-4935-4e7f-8fc6-b51adcfeebc4/ovnkube-controller/1.log" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.196030 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerStarted","Data":"37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6"} Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.196755 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.227399 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-cer
ts\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c303953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:09Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.245608 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:09Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.257930 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.257993 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:09 crc 
kubenswrapper[4665]: I1205 01:11:09.258010 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.258083 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.258124 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:09Z","lastTransitionTime":"2025-12-05T01:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.265938 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 
cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:09Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.278708 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:09Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.292245 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:09Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.314506 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:09Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.332963 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:09Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.354934 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:09Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.363966 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.364021 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.364037 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.364061 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.364077 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:09Z","lastTransitionTime":"2025-12-05T01:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.372326 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:09Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.388027 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:09Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.402116 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podI
Ps\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:09Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.418141 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e233784-fe06-4bef-a30e-29d8dca2f91d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9aafbd36430c36f8d23c7ea72a4b97080a503c7f79c113d1861ed80c2140c2e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944b0c438e2c4d4babd4e351233dd36f74a2e322de0895ac47454bfb37d137dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\
\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7xnrl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:09Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.436175 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:09Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.450353 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:09Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.466831 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.466892 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.466913 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.466937 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.466957 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:09Z","lastTransitionTime":"2025-12-05T01:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.481422 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df2c51e6271c7348e7ba171fd408e4ea1018109b8bc18ce3f439cdc7b5cc0ba8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"d to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z]\\\\nI1205 01:10:55.016653 5990 services_controller.go:434] Service openshift-machine-config-operator/machine-config-controller retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-config-controller openshift-machine-config-operator aa30290d-3a39-43ba-a212-6439bd680987 4486 0 2025-02-23 05:12:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-controller] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mcc-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0073ff8bb \\\\u003cnil\\\\u003e}] [] 
[]},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Na\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:11:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168
.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:09Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.493617 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:09Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.505372 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xhbdk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1e639aa-4bf7-4baa-a332-62dffec786d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xhbdk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:09Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.569548 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.569594 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.569606 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.569635 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.569649 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:09Z","lastTransitionTime":"2025-12-05T01:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.671805 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.672199 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.672264 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.672375 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.672405 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:09Z","lastTransitionTime":"2025-12-05T01:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.778034 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.778080 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.778094 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.778110 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.778119 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:09Z","lastTransitionTime":"2025-12-05T01:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.880228 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.880262 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.880276 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.880315 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.880329 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:09Z","lastTransitionTime":"2025-12-05T01:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.892436 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:09 crc kubenswrapper[4665]: E1205 01:11:09.892554 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.983543 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.983590 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.983602 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.983617 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:09 crc kubenswrapper[4665]: I1205 01:11:09.983626 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:09Z","lastTransitionTime":"2025-12-05T01:11:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.085984 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.086040 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.086053 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.086072 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.086086 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:10Z","lastTransitionTime":"2025-12-05T01:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.189579 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.189632 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.189644 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.189662 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.189676 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:10Z","lastTransitionTime":"2025-12-05T01:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.200039 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bmn9_76af84f2-4935-4e7f-8fc6-b51adcfeebc4/ovnkube-controller/2.log" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.200808 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bmn9_76af84f2-4935-4e7f-8fc6-b51adcfeebc4/ovnkube-controller/1.log" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.203054 4665 generic.go:334] "Generic (PLEG): container finished" podID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerID="37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6" exitCode=1 Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.203096 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerDied","Data":"37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6"} Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.203131 4665 scope.go:117] "RemoveContainer" containerID="df2c51e6271c7348e7ba171fd408e4ea1018109b8bc18ce3f439cdc7b5cc0ba8" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.203846 4665 scope.go:117] "RemoveContainer" containerID="37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6" Dec 05 01:11:10 crc kubenswrapper[4665]: E1205 01:11:10.204027 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2bmn9_openshift-ovn-kubernetes(76af84f2-4935-4e7f-8fc6-b51adcfeebc4)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.222016 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:10Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.231880 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:10Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.252589 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df2c51e6271c7348e7ba171fd408e4ea1018109b8bc18ce3f439cdc7b5cc0ba8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:10:55Z\\\",\\\"message\\\":\\\"d to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.openshift.io\\\\\\\": failed to call webhook: Post \\\\\\\"https://127.0.0.1:9743/node?timeout=10s\\\\\\\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:10:54Z is after 2025-08-24T17:21:41Z]\\\\nI1205 01:10:55.016653 5990 services_controller.go:434] Service openshift-machine-config-operator/machine-config-controller retrieved from lister for network=default: \\\\u0026Service{ObjectMeta:{machine-config-controller openshift-machine-config-operator aa30290d-3a39-43ba-a212-6439bd680987 4486 0 2025-02-23 05:12:25 +0000 UTC \\\\u003cnil\\\\u003e \\\\u003cnil\\\\u003e map[k8s-app:machine-config-controller] map[include.release.openshift.io/ibm-cloud-managed:true include.release.openshift.io/self-managed-high-availability:true include.release.openshift.io/single-node-developer:true service.alpha.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168 service.beta.openshift.io/serving-cert-secret-name:mcc-proxy-tls service.beta.openshift.io/serving-cert-signed-by:openshift-service-serving-signer@1740288168] [{config.openshift.io/v1 ClusterVersion version 9101b518-476b-4eea-8fa6-69b0534e5caa 0xc0073ff8bb \\\\u003cnil\\\\u003e}] [] []},Spec:ServiceSpec{Ports:[]ServicePort{ServicePort{Na\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:54Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:11:09Z\\\",\\\"message\\\":\\\"enshift-image-registry/image-registry\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-image-registry/image-registry_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, 
Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 01:11:08.951199 6194 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1205 01:11:08.951202 6194 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type *v1.Pod\\\\nF1205 01:11:08.951251 6194 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:11:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-o
penvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:10Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.266336 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:10Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.282576 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xhbdk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1e639aa-4bf7-4baa-a332-62dffec786d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xhbdk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:10Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.292519 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.292612 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.292672 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.292694 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.292738 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:10Z","lastTransitionTime":"2025-12-05T01:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.301665 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c303953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:10Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.314275 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:10Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.327518 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.
io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\
"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-05T01:11:10Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.338623 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name
\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:10Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.349970 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:10Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.360954 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:10Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.372422 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:10Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.385353 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:10Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.395129 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.395162 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.395170 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.395185 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.395195 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:10Z","lastTransitionTime":"2025-12-05T01:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.395753 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:10Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.405668 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:10Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.418175 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podI
Ps\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:10Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.427685 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e233784-fe06-4bef-a30e-29d8dca2f91d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9aafbd36430c36f8d23c7ea72a4b97080a503c7f79c113d1861ed80c2140c2e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944b0c438e2c4d4babd4e351233dd36f74a2e322de0895ac47454bfb37d137dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\
\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7xnrl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:10Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.497980 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.498008 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.498018 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.498032 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.498043 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:10Z","lastTransitionTime":"2025-12-05T01:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.600635 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.600670 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.600681 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.600697 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.600709 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:10Z","lastTransitionTime":"2025-12-05T01:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.703162 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.703198 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.703209 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.703223 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.703234 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:10Z","lastTransitionTime":"2025-12-05T01:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.805661 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.805697 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.805706 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.805719 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.805729 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:10Z","lastTransitionTime":"2025-12-05T01:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.893048 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.893096 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:10 crc kubenswrapper[4665]: E1205 01:11:10.893175 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.893061 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:10 crc kubenswrapper[4665]: E1205 01:11:10.893346 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:11:10 crc kubenswrapper[4665]: E1205 01:11:10.893457 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.909153 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.909194 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.909203 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.909218 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:10 crc kubenswrapper[4665]: I1205 01:11:10.909229 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:10Z","lastTransitionTime":"2025-12-05T01:11:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.012389 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.012449 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.012468 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.012492 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.012513 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:11Z","lastTransitionTime":"2025-12-05T01:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.115399 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.115464 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.115481 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.115504 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.115520 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:11Z","lastTransitionTime":"2025-12-05T01:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.211152 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bmn9_76af84f2-4935-4e7f-8fc6-b51adcfeebc4/ovnkube-controller/2.log" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.217832 4665 scope.go:117] "RemoveContainer" containerID="37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6" Dec 05 01:11:11 crc kubenswrapper[4665]: E1205 01:11:11.218426 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2bmn9_openshift-ovn-kubernetes(76af84f2-4935-4e7f-8fc6-b51adcfeebc4)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.218977 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.219034 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.219054 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.219078 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.219095 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:11Z","lastTransitionTime":"2025-12-05T01:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.239008 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:11Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.257788 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:11Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.280494 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:11Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.297155 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:11Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.310752 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:11Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.321545 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.321786 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.321896 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.322020 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.322125 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:11Z","lastTransitionTime":"2025-12-05T01:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.326860 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:11Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.339917 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:11Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.353625 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:11Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.366836 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-05T01:11:11Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.377237 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:11Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.389179 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e233784-fe06-4bef-a30e-29d8dca2f91d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9aafbd36430c36f8d23c7ea72a4b97080a503c7f79c113d1861ed80c2140c2e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944b0c438e2c4d4babd4e351233dd36f74a2e322de0895ac47454bfb37d137dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\
\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7xnrl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:11Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.415039 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:11Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.425435 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.425523 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.425539 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.425587 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.425605 4665 setters.go:603] "Node became not ready" 
node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:11Z","lastTransitionTime":"2025-12-05T01:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.427673 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:11Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.450871 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:11:09Z\\\",\\\"message\\\":\\\"enshift-image-registry/image-registry\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-image-registry/image-registry_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 01:11:08.951199 6194 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1205 01:11:08.951202 6194 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type *v1.Pod\\\\nF1205 01:11:08.951251 6194 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:11:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2bmn9_openshift-ovn-kubernetes(76af84f2-4935-4e7f-8fc6-b51adcfeebc4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:11Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.463684 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:11Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.474177 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xhbdk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1e639aa-4bf7-4baa-a332-62dffec786d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-xhbdk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:11Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.494922 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c303953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e
9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:11Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.528228 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.528916 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.528990 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.529055 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.529120 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:11Z","lastTransitionTime":"2025-12-05T01:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.632106 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.632152 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.632161 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.632179 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.632191 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:11Z","lastTransitionTime":"2025-12-05T01:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.736179 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.736256 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.736281 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.736361 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.736386 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:11Z","lastTransitionTime":"2025-12-05T01:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.839639 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.839965 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.840101 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.840240 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.840445 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:11Z","lastTransitionTime":"2025-12-05T01:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.893275 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:11 crc kubenswrapper[4665]: E1205 01:11:11.893660 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.942092 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.942141 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.942151 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.942164 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:11 crc kubenswrapper[4665]: I1205 01:11:11.942173 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:11Z","lastTransitionTime":"2025-12-05T01:11:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.044730 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.044783 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.044795 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.044815 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.044828 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:12Z","lastTransitionTime":"2025-12-05T01:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.147560 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.147773 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.147835 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.147894 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.147985 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:12Z","lastTransitionTime":"2025-12-05T01:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.250145 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.250186 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.250198 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.250214 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.250225 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:12Z","lastTransitionTime":"2025-12-05T01:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.352852 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.352909 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.352926 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.352949 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.352967 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:12Z","lastTransitionTime":"2025-12-05T01:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.460578 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.460713 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.460736 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.460758 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.460775 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:12Z","lastTransitionTime":"2025-12-05T01:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.563559 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.563638 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.563660 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.563690 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.563713 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:12Z","lastTransitionTime":"2025-12-05T01:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.666322 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.666370 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.666382 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.666400 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.666411 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:12Z","lastTransitionTime":"2025-12-05T01:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.768601 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.768660 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.768676 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.768694 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.768704 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:12Z","lastTransitionTime":"2025-12-05T01:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.871308 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.871335 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.871343 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.871356 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.871364 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:12Z","lastTransitionTime":"2025-12-05T01:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.893083 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:12 crc kubenswrapper[4665]: E1205 01:11:12.893211 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.893466 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:12 crc kubenswrapper[4665]: E1205 01:11:12.893518 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.893977 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:12 crc kubenswrapper[4665]: E1205 01:11:12.894057 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.974050 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.974097 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.974115 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.974131 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:12 crc kubenswrapper[4665]: I1205 01:11:12.974143 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:12Z","lastTransitionTime":"2025-12-05T01:11:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.076447 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.076489 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.076500 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.076517 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.076529 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:13Z","lastTransitionTime":"2025-12-05T01:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.178364 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.178423 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.178433 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.178447 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.178457 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:13Z","lastTransitionTime":"2025-12-05T01:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.280619 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.280676 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.280688 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.280703 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.280713 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:13Z","lastTransitionTime":"2025-12-05T01:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.384202 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.384256 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.384272 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.384323 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.384341 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:13Z","lastTransitionTime":"2025-12-05T01:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.487682 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.487717 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.487726 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.487742 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.487750 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:13Z","lastTransitionTime":"2025-12-05T01:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.590076 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.590714 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.590778 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.590842 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.590920 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:13Z","lastTransitionTime":"2025-12-05T01:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.693693 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.693767 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.693791 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.693843 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.693863 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:13Z","lastTransitionTime":"2025-12-05T01:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.764552 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs\") pod \"network-metrics-daemon-xhbdk\" (UID: \"e1e639aa-4bf7-4baa-a332-62dffec786d8\") " pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:13 crc kubenswrapper[4665]: E1205 01:11:13.764742 4665 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 01:11:13 crc kubenswrapper[4665]: E1205 01:11:13.764849 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs podName:e1e639aa-4bf7-4baa-a332-62dffec786d8 nodeName:}" failed. No retries permitted until 2025-12-05 01:11:29.764825249 +0000 UTC m=+65.104217568 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs") pod "network-metrics-daemon-xhbdk" (UID: "e1e639aa-4bf7-4baa-a332-62dffec786d8") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.796028 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.796070 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.796081 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.796099 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.796111 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:13Z","lastTransitionTime":"2025-12-05T01:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.892703 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:13 crc kubenswrapper[4665]: E1205 01:11:13.892896 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.899991 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.900038 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.900054 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.900072 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:13 crc kubenswrapper[4665]: I1205 01:11:13.900085 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:13Z","lastTransitionTime":"2025-12-05T01:11:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.003032 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.003070 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.003079 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.003094 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.003108 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:14Z","lastTransitionTime":"2025-12-05T01:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.105493 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.105548 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.105561 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.105582 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.105599 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:14Z","lastTransitionTime":"2025-12-05T01:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.208213 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.208259 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.208269 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.208286 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.208564 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:14Z","lastTransitionTime":"2025-12-05T01:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.312215 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.312315 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.312333 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.312352 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.312366 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:14Z","lastTransitionTime":"2025-12-05T01:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.415661 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.415733 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.415752 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.415784 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.415806 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:14Z","lastTransitionTime":"2025-12-05T01:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.518831 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.518877 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.518888 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.518909 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.518921 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:14Z","lastTransitionTime":"2025-12-05T01:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.622462 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.623035 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.623062 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.623097 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.623122 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:14Z","lastTransitionTime":"2025-12-05T01:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.725276 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.725333 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.725346 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.725362 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.725374 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:14Z","lastTransitionTime":"2025-12-05T01:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.775183 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:11:14 crc kubenswrapper[4665]: E1205 01:11:14.775375 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:11:46.775348742 +0000 UTC m=+82.114741041 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.827902 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.827966 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.827993 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.828022 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.828044 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:14Z","lastTransitionTime":"2025-12-05T01:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.876736 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.876795 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.876835 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.876879 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:14 crc kubenswrapper[4665]: E1205 01:11:14.876893 4665 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Dec 05 01:11:14 crc kubenswrapper[4665]: E1205 01:11:14.876989 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 01:11:46.876966701 +0000 UTC m=+82.216359000 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 01:11:14 crc kubenswrapper[4665]: E1205 01:11:14.877030 4665 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 01:11:14 crc kubenswrapper[4665]: E1205 01:11:14.877099 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 01:11:46.877083004 +0000 UTC m=+82.216475303 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 01:11:14 crc kubenswrapper[4665]: E1205 01:11:14.877033 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 01:11:14 crc kubenswrapper[4665]: E1205 01:11:14.877116 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 01:11:14 crc kubenswrapper[4665]: E1205 01:11:14.877145 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 01:11:14 crc kubenswrapper[4665]: E1205 01:11:14.877162 4665 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:11:14 crc kubenswrapper[4665]: E1205 01:11:14.877227 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 01:11:46.877206317 +0000 UTC m=+82.216598636 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:11:14 crc kubenswrapper[4665]: E1205 01:11:14.877124 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 01:11:14 crc kubenswrapper[4665]: E1205 01:11:14.877263 4665 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:11:14 crc kubenswrapper[4665]: E1205 01:11:14.877306 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 01:11:46.877286289 +0000 UTC m=+82.216678588 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.894155 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:14 crc kubenswrapper[4665]: E1205 01:11:14.894328 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.894350 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.894457 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:14 crc kubenswrapper[4665]: E1205 01:11:14.894562 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:11:14 crc kubenswrapper[4665]: E1205 01:11:14.894950 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.929079 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71
334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c303953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\
":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:14Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.929967 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.930088 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.930177 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.930261 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.930386 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:14Z","lastTransitionTime":"2025-12-05T01:11:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.943543 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:14Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.955229 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:14Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.968077 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:14Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.977958 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:14Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:14 crc kubenswrapper[4665]: I1205 01:11:14.987776 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:14Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.005092 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\
"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\
",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.020755 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d746
2\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' 
detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.032595 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.032632 4665 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.032643 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.032660 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.032671 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:15Z","lastTransitionTime":"2025-12-05T01:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.032477 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\
\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.045162 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e233784-fe06-4bef-a30e-29d8dca2f91d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9aafbd36430c36f8d23c7ea72a4b97080a503c7f79c113d1861ed80c2140c2e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944b0c438e2c4d4babd4e351233dd36f74a2e322de0895ac47454bfb37d137dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc
32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7xnrl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.059129 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.073431 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.083601 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.105358 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:11:09Z\\\",\\\"message\\\":\\\"enshift-image-registry/image-registry\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-image-registry/image-registry_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 01:11:08.951199 6194 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1205 01:11:08.951202 6194 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type *v1.Pod\\\\nF1205 01:11:08.951251 6194 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:11:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2bmn9_openshift-ovn-kubernetes(76af84f2-4935-4e7f-8fc6-b51adcfeebc4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.116170 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.130288 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xhbdk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1e639aa-4bf7-4baa-a332-62dffec786d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-xhbdk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.134793 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.134824 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.134833 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.134847 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.134856 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:15Z","lastTransitionTime":"2025-12-05T01:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.149738 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.237122 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.237166 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.237179 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.237195 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.237222 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:15Z","lastTransitionTime":"2025-12-05T01:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.340156 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.340206 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.340219 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.340239 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.340251 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:15Z","lastTransitionTime":"2025-12-05T01:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.442763 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.442822 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.442835 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.442853 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.442867 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:15Z","lastTransitionTime":"2025-12-05T01:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.479359 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.493062 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.509872 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPat
h\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c303953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{
\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.530864 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.544878 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.544941 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.544979 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.545003 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.545018 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:15Z","lastTransitionTime":"2025-12-05T01:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.549491 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.565203 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cn
ibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.578223 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.590965 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.604437 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.617381 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.628661 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc
-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.637767 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.646655 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.646686 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.646695 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.646709 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.646718 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:15Z","lastTransitionTime":"2025-12-05T01:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.651459 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.660728 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e233784-fe06-4bef-a30e-29d8dca2f91d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9aafbd36430c36f8d23c7ea72a4b97080a503c7f79c113d1861ed80c2140c2e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944b0c438e2c4d4babd4e351233dd36f74a2e322de0895ac47454bfb37d137dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:
57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7xnrl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.670497 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xhbdk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1e639aa-4bf7-4baa-a332-62dffec786d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xhbdk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.681853 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.691280 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.707518 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:11:09Z\\\",\\\"message\\\":\\\"enshift-image-registry/image-registry\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-image-registry/image-registry_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 01:11:08.951199 6194 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1205 01:11:08.951202 6194 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type *v1.Pod\\\\nF1205 01:11:08.951251 6194 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:11:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2bmn9_openshift-ovn-kubernetes(76af84f2-4935-4e7f-8fc6-b51adcfeebc4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.720698 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:15Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.749167 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.749203 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.749212 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.749224 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.749233 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:15Z","lastTransitionTime":"2025-12-05T01:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.850998 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.851029 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.851038 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.851050 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.851058 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:15Z","lastTransitionTime":"2025-12-05T01:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.892739 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:15 crc kubenswrapper[4665]: E1205 01:11:15.892853 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.952852 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.952881 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.952889 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.952902 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:15 crc kubenswrapper[4665]: I1205 01:11:15.952910 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:15Z","lastTransitionTime":"2025-12-05T01:11:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.055144 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.055179 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.055190 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.055205 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.055216 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:16Z","lastTransitionTime":"2025-12-05T01:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.158629 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.158664 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.158673 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.158688 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.158699 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:16Z","lastTransitionTime":"2025-12-05T01:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.260531 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.260585 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.260601 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.260623 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.260637 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:16Z","lastTransitionTime":"2025-12-05T01:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.362362 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.362398 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.362407 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.362422 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.362431 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:16Z","lastTransitionTime":"2025-12-05T01:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.465365 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.465403 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.465411 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.465433 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.465453 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:16Z","lastTransitionTime":"2025-12-05T01:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.542762 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.542827 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.542846 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.542870 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.542887 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:16Z","lastTransitionTime":"2025-12-05T01:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:16 crc kubenswrapper[4665]: E1205 01:11:16.560611 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:16Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.565372 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.565429 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.565439 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.565469 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.565479 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:16Z","lastTransitionTime":"2025-12-05T01:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:16 crc kubenswrapper[4665]: E1205 01:11:16.579430 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:16Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.582658 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.582699 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
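The status patch the kubelet keeps retrying above is a Kubernetes strategic merge patch: entries in "conditions" are merged into the existing list by their "type" key, and the "$setElementOrder/conditions" directive pins the order of the merged result. A minimal sketch of that payload shape, in illustrative Python (not kubelet code; the kubectl command in the comment is an assumed equivalent, not taken from this log):

import json

# Skeleton of the node-status patch seen in the log: conditions are merged
# by their "type" key; "$setElementOrder/conditions" fixes the final order.
patch = {
    "status": {
        "$setElementOrder/conditions": [
            {"type": "MemoryPressure"},
            {"type": "DiskPressure"},
            {"type": "PIDPressure"},
            {"type": "Ready"},
        ],
        "conditions": [
            {
                "type": "Ready",  # merge key
                "status": "False",
                "reason": "KubeletNotReady",
                "lastHeartbeatTime": "2025-12-05T01:11:16Z",
                "lastTransitionTime": "2025-12-05T01:11:16Z",
            },
        ],
    }
}

# A patch of this shape could be applied by hand with, e.g.:
#   kubectl patch node crc --type=strategic -p '<the JSON printed below>'
print(json.dumps(patch, indent=2))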
event="NodeHasNoDiskPressure" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.582714 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.582734 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.582749 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:16Z","lastTransitionTime":"2025-12-05T01:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:16 crc kubenswrapper[4665]: E1205 01:11:16.599354 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:16Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.603327 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.603366 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.603378 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.603396 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.603410 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:16Z","lastTransitionTime":"2025-12-05T01:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:16 crc kubenswrapper[4665]: E1205 01:11:16.615069 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:16Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.618483 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.618524 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.618536 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.618555 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.618566 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:16Z","lastTransitionTime":"2025-12-05T01:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:16 crc kubenswrapper[4665]: E1205 01:11:16.635136 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:16Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:16 crc kubenswrapper[4665]: E1205 01:11:16.635277 4665 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.636503 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
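Every attempt above fails before the patch reaches the Node object: the API server cannot call the node.network-node-identity.openshift.io admission webhook because the webhook's serving certificate expired on 2025-08-24T17:21:41Z while the current time is 2025-12-05T01:11:16Z, so TLS verification is rejected, and after its fixed number of retries the kubelet gives up with "update node status exceeds retry count". A minimal sketch of the validity-window check behind the x509 error, in illustrative Python (assumes the third-party "cryptography" package, version 42+ for the *_utc accessors; "webhook.pem" is a hypothetical dump of the cert, not part of this log):

from datetime import datetime, timezone

from cryptography import x509  # third-party package, assumed installed

# Hypothetical dump of the webhook's serving certificate, e.g. obtained with:
#   openssl s_client -connect 127.0.0.1:9743 </dev/null 2>/dev/null \
#     | openssl x509 -outform PEM > webhook.pem
with open("webhook.pem", "rb") as f:
    cert = x509.load_pem_x509_certificate(f.read())

now = datetime.now(timezone.utc)
if now > cert.not_valid_after_utc:
    # Same condition the Go TLS stack reports in the records above.
    print(f"certificate has expired: current time "
          f"{now:%Y-%m-%dT%H:%M:%SZ} is after "
          f"{cert.not_valid_after_utc:%Y-%m-%dT%H:%M:%SZ}")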
event="NodeHasSufficientMemory" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.636533 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.636544 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.636557 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.636567 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:16Z","lastTransitionTime":"2025-12-05T01:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.739889 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.739957 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.739982 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.740012 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.740037 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:16Z","lastTransitionTime":"2025-12-05T01:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.843085 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.843151 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.843217 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.843242 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.843259 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:16Z","lastTransitionTime":"2025-12-05T01:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.893289 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.893448 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:16 crc kubenswrapper[4665]: E1205 01:11:16.893517 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:11:16 crc kubenswrapper[4665]: E1205 01:11:16.893691 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.893840 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:16 crc kubenswrapper[4665]: E1205 01:11:16.894056 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.952483 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.952609 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.952689 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.952719 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:16 crc kubenswrapper[4665]: I1205 01:11:16.952736 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:16Z","lastTransitionTime":"2025-12-05T01:11:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.055910 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.055949 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.055958 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.055971 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.055981 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:17Z","lastTransitionTime":"2025-12-05T01:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.158511 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.158552 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.158565 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.158580 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.158591 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:17Z","lastTransitionTime":"2025-12-05T01:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.261237 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.261329 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.261346 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.261368 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.261386 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:17Z","lastTransitionTime":"2025-12-05T01:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.364578 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.364633 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.364649 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.364670 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.364687 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:17Z","lastTransitionTime":"2025-12-05T01:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.467409 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.467471 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.467481 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.467492 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.467502 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:17Z","lastTransitionTime":"2025-12-05T01:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.569812 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.569893 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.569912 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.569937 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.569954 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:17Z","lastTransitionTime":"2025-12-05T01:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.673202 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.673274 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.673339 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.673366 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.673384 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:17Z","lastTransitionTime":"2025-12-05T01:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.776220 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.776350 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.776368 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.776392 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.776409 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:17Z","lastTransitionTime":"2025-12-05T01:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.879350 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.879396 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.879408 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.879425 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.879437 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:17Z","lastTransitionTime":"2025-12-05T01:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.893123 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:17 crc kubenswrapper[4665]: E1205 01:11:17.893268 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.981728 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.981797 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.981816 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.981840 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:17 crc kubenswrapper[4665]: I1205 01:11:17.981856 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:17Z","lastTransitionTime":"2025-12-05T01:11:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.085880 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.086003 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.086031 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.086063 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.086086 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:18Z","lastTransitionTime":"2025-12-05T01:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.188901 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.188978 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.189001 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.189032 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.189056 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:18Z","lastTransitionTime":"2025-12-05T01:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.292474 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.292546 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.292570 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.292602 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.292625 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:18Z","lastTransitionTime":"2025-12-05T01:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.395678 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.395716 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.395730 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.395745 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.395754 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:18Z","lastTransitionTime":"2025-12-05T01:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.499085 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.499126 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.499138 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.499154 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.499164 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:18Z","lastTransitionTime":"2025-12-05T01:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.602203 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.602245 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.602260 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.602311 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.602323 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:18Z","lastTransitionTime":"2025-12-05T01:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.705889 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.705973 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.705990 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.706038 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.706056 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:18Z","lastTransitionTime":"2025-12-05T01:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.808565 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.808598 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.808609 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.808625 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.808639 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:18Z","lastTransitionTime":"2025-12-05T01:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.893075 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.893159 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.893225 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:18 crc kubenswrapper[4665]: E1205 01:11:18.893217 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:11:18 crc kubenswrapper[4665]: E1205 01:11:18.893445 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:11:18 crc kubenswrapper[4665]: E1205 01:11:18.893643 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.911547 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.911625 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.911639 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.911653 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:18 crc kubenswrapper[4665]: I1205 01:11:18.911676 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:18Z","lastTransitionTime":"2025-12-05T01:11:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.014745 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.014812 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.014834 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.014862 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.014889 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:19Z","lastTransitionTime":"2025-12-05T01:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.120107 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.120167 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.120187 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.120214 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.120230 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:19Z","lastTransitionTime":"2025-12-05T01:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.223051 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.223101 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.223115 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.223133 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.223144 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:19Z","lastTransitionTime":"2025-12-05T01:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.326149 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.326190 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.326199 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.326214 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.326226 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:19Z","lastTransitionTime":"2025-12-05T01:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.429008 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.429093 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.429126 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.429157 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.429180 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:19Z","lastTransitionTime":"2025-12-05T01:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.532506 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.532553 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.532566 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.532583 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.532598 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:19Z","lastTransitionTime":"2025-12-05T01:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.635182 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.635236 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.635248 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.635267 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.635280 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:19Z","lastTransitionTime":"2025-12-05T01:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.738657 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.738700 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.738711 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.738728 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.738741 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:19Z","lastTransitionTime":"2025-12-05T01:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.841198 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.841252 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.841264 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.841283 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.841311 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:19Z","lastTransitionTime":"2025-12-05T01:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.893383 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:19 crc kubenswrapper[4665]: E1205 01:11:19.893586 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.944227 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.944286 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.944320 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.944336 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:19 crc kubenswrapper[4665]: I1205 01:11:19.944347 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:19Z","lastTransitionTime":"2025-12-05T01:11:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.046579 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.046622 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.046631 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.046644 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.046651 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:20Z","lastTransitionTime":"2025-12-05T01:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.148878 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.148941 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.148962 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.148991 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.149011 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:20Z","lastTransitionTime":"2025-12-05T01:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.252000 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.252048 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.252062 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.252081 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.252094 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:20Z","lastTransitionTime":"2025-12-05T01:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.354205 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.354246 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.354257 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.354278 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.354310 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:20Z","lastTransitionTime":"2025-12-05T01:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.456533 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.456571 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.456581 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.456598 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.456609 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:20Z","lastTransitionTime":"2025-12-05T01:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.559037 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.559162 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.559177 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.559193 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.559202 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:20Z","lastTransitionTime":"2025-12-05T01:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.661137 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.661173 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.661182 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.661198 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.661207 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:20Z","lastTransitionTime":"2025-12-05T01:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.763568 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.763602 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.763609 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.763625 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.763635 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:20Z","lastTransitionTime":"2025-12-05T01:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.866119 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.866156 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.866166 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.866183 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.866193 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:20Z","lastTransitionTime":"2025-12-05T01:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.893008 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.893046 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:20 crc kubenswrapper[4665]: E1205 01:11:20.893187 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.893232 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:20 crc kubenswrapper[4665]: E1205 01:11:20.893372 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:11:20 crc kubenswrapper[4665]: E1205 01:11:20.893448 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.968203 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.968259 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.968277 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.968336 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:20 crc kubenswrapper[4665]: I1205 01:11:20.968359 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:20Z","lastTransitionTime":"2025-12-05T01:11:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.071547 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.071610 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.071631 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.071654 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.071672 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:21Z","lastTransitionTime":"2025-12-05T01:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.174748 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.174846 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.174870 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.174901 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.174926 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:21Z","lastTransitionTime":"2025-12-05T01:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.277447 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.277518 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.277541 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.277571 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.277591 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:21Z","lastTransitionTime":"2025-12-05T01:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.380510 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.380594 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.380611 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.380633 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.380652 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:21Z","lastTransitionTime":"2025-12-05T01:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.483398 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.483474 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.483494 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.483518 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.483537 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:21Z","lastTransitionTime":"2025-12-05T01:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.586378 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.586474 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.586494 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.586519 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.586538 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:21Z","lastTransitionTime":"2025-12-05T01:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.689971 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.690034 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.690051 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.690074 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.690091 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:21Z","lastTransitionTime":"2025-12-05T01:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.840346 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.840419 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.840442 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.840473 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.840495 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:21Z","lastTransitionTime":"2025-12-05T01:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.892637 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:21 crc kubenswrapper[4665]: E1205 01:11:21.892833 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.943239 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.943362 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.943387 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.943420 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:21 crc kubenswrapper[4665]: I1205 01:11:21.943442 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:21Z","lastTransitionTime":"2025-12-05T01:11:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.048047 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.048100 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.048111 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.048131 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.048148 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:22Z","lastTransitionTime":"2025-12-05T01:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.150702 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.150753 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.150766 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.150786 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.150797 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:22Z","lastTransitionTime":"2025-12-05T01:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.252126 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.252176 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.252195 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.252219 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.252236 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:22Z","lastTransitionTime":"2025-12-05T01:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.355702 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.355773 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.355797 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.355827 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.355925 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:22Z","lastTransitionTime":"2025-12-05T01:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.458557 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.458612 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.458624 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.458642 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.458654 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:22Z","lastTransitionTime":"2025-12-05T01:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.563104 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.563203 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.563228 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.563264 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.563284 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:22Z","lastTransitionTime":"2025-12-05T01:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.667145 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.667191 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.667202 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.667221 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.667233 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:22Z","lastTransitionTime":"2025-12-05T01:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.769765 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.769811 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.769820 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.769835 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.769844 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:22Z","lastTransitionTime":"2025-12-05T01:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.872402 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.872437 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.872446 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.872459 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.872471 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:22Z","lastTransitionTime":"2025-12-05T01:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.893215 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.893240 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.893213 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:22 crc kubenswrapper[4665]: E1205 01:11:22.893394 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:11:22 crc kubenswrapper[4665]: E1205 01:11:22.893596 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:11:22 crc kubenswrapper[4665]: E1205 01:11:22.893669 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.894263 4665 scope.go:117] "RemoveContainer" containerID="37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6" Dec 05 01:11:22 crc kubenswrapper[4665]: E1205 01:11:22.894414 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2bmn9_openshift-ovn-kubernetes(76af84f2-4935-4e7f-8fc6-b51adcfeebc4)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.973997 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.974049 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.974062 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.974082 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:22 crc kubenswrapper[4665]: I1205 01:11:22.974097 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:22Z","lastTransitionTime":"2025-12-05T01:11:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.077165 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.077205 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.077213 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.077227 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.077236 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:23Z","lastTransitionTime":"2025-12-05T01:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.179904 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.179976 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.179999 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.180036 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.180059 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:23Z","lastTransitionTime":"2025-12-05T01:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.282614 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.282666 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.282679 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.282695 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.282706 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:23Z","lastTransitionTime":"2025-12-05T01:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.385310 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.385348 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.385357 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.385370 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.385379 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:23Z","lastTransitionTime":"2025-12-05T01:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.487990 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.488036 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.488047 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.488062 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.488072 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:23Z","lastTransitionTime":"2025-12-05T01:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.590582 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.590635 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.590653 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.590671 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.590681 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:23Z","lastTransitionTime":"2025-12-05T01:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.697457 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.697500 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.697511 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.697527 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.697537 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:23Z","lastTransitionTime":"2025-12-05T01:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.799198 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.799235 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.799244 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.799257 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.799267 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:23Z","lastTransitionTime":"2025-12-05T01:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.893444 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:23 crc kubenswrapper[4665]: E1205 01:11:23.893577 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.901181 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.901216 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.901227 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.901242 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:23 crc kubenswrapper[4665]: I1205 01:11:23.901254 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:23Z","lastTransitionTime":"2025-12-05T01:11:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.004097 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.004172 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.004194 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.004223 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.004246 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:24Z","lastTransitionTime":"2025-12-05T01:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.106628 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.106669 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.106682 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.106702 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.106719 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:24Z","lastTransitionTime":"2025-12-05T01:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.210772 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.210831 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.210852 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.210879 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.210897 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:24Z","lastTransitionTime":"2025-12-05T01:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.313696 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.313727 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.313736 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.313749 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.313760 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:24Z","lastTransitionTime":"2025-12-05T01:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.416584 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.416648 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.416664 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.416686 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.416704 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:24Z","lastTransitionTime":"2025-12-05T01:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.518589 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.518641 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.518673 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.518696 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.518711 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:24Z","lastTransitionTime":"2025-12-05T01:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.621227 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.621261 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.621272 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.621286 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.621318 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:24Z","lastTransitionTime":"2025-12-05T01:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.723611 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.723650 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.723661 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.723679 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.723689 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:24Z","lastTransitionTime":"2025-12-05T01:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.826229 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.826265 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.826275 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.826302 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.826312 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:24Z","lastTransitionTime":"2025-12-05T01:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.892588 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.892681 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.892748 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:24 crc kubenswrapper[4665]: E1205 01:11:24.892865 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:11:24 crc kubenswrapper[4665]: E1205 01:11:24.893029 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:11:24 crc kubenswrapper[4665]: E1205 01:11:24.893211 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.903401 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:24Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.923697 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:11:09Z\\\",\\\"message\\\":\\\"enshift-image-registry/image-registry\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-image-registry/image-registry_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 01:11:08.951199 6194 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1205 01:11:08.951202 6194 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type *v1.Pod\\\\nF1205 01:11:08.951251 6194 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:11:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2bmn9_openshift-ovn-kubernetes(76af84f2-4935-4e7f-8fc6-b51adcfeebc4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:24Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.929000 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.929034 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.929045 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.929081 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.929092 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:24Z","lastTransitionTime":"2025-12-05T01:11:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.934762 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:24Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.945552 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xhbdk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1e639aa-4bf7-4baa-a332-62dffec786d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xhbdk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:24Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.956233 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:24Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.972401 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c3
03953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:24Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.983066 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:24Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:24 crc kubenswrapper[4665]: I1205 01:11:24.994065 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:24Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.007532 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:25Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.017773 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:25Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.027529 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\
"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:25Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.030751 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.030782 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.030794 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.030811 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.030824 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:25Z","lastTransitionTime":"2025-12-05T01:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.038773 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:25Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.049749 4665 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\
\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:25Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.059260 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"889d53ba-f6b3-4293-8565-01740293dfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ba79066d653d7bb787b2e25c4814d52630cb5b93d5a64706a7ed4438db92204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b526f85582e7a9dd8a7cd3ecbcf11caca2a83cd5b9a9e3f70cc3c7ee9680e7d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02e1517bd35ee9ac7c95a0aa67ccd5d85fbcb63c1cd17c3acd10a493649d4629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2504ed26a239c4f11d977aad9a4cc0ef5c9a7eefee20790a21dd013288a06a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c2504ed26a239c4f11d977aad9a4cc0ef5c9a7eefee20790a21dd013288a06a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:25Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.071506 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:25Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.080948 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e233784-fe06-4bef-a30e-29d8dca2f91d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9aafbd36430c36f8d23c7ea72a4b97080a503c7f79c113d1861ed80c2140c2e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944b0c4
38e2c4d4babd4e351233dd36f74a2e322de0895ac47454bfb37d137dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7xnrl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:25Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.090726 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:25Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.099924 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-05T01:11:25Z is after 2025-08-24T17:21:41Z"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.133509 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.133543 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.133555 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.133571 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.133582 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:25Z","lastTransitionTime":"2025-12-05T01:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.236146 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.236181 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.236191 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.236206 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.236215 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:25Z","lastTransitionTime":"2025-12-05T01:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.337738 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.337794 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.337810 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.337829 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.337839 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:25Z","lastTransitionTime":"2025-12-05T01:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.440651 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.440709 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.440730 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.440760 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.440778 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:25Z","lastTransitionTime":"2025-12-05T01:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.543333 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.543376 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.543385 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.543399 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.543408 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:25Z","lastTransitionTime":"2025-12-05T01:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.646398 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.646431 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.646440 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.646454 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.646463 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:25Z","lastTransitionTime":"2025-12-05T01:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.749113 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.749158 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.749170 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.749190 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.749201 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:25Z","lastTransitionTime":"2025-12-05T01:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.851851 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.851876 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.851884 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.851897 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.851905 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:25Z","lastTransitionTime":"2025-12-05T01:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.893205 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk"
Dec 05 01:11:25 crc kubenswrapper[4665]: E1205 01:11:25.893351 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.954476 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.954503 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.954511 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.954524 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:25 crc kubenswrapper[4665]: I1205 01:11:25.954532 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:25Z","lastTransitionTime":"2025-12-05T01:11:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.056915 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.056960 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.056971 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.056989 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.057001 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:26Z","lastTransitionTime":"2025-12-05T01:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.159430 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.159461 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.159470 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.159483 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.159492 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:26Z","lastTransitionTime":"2025-12-05T01:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.261852 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.261918 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.261938 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.261964 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.261981 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:26Z","lastTransitionTime":"2025-12-05T01:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.366402 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.366508 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.366529 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.366558 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.366641 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:26Z","lastTransitionTime":"2025-12-05T01:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.468990 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.469029 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.469038 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.469055 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.469064 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:26Z","lastTransitionTime":"2025-12-05T01:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.571559 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.571609 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.571620 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.571636 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.571647 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:26Z","lastTransitionTime":"2025-12-05T01:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.673717 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.673975 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.673986 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.674001 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.674012 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:26Z","lastTransitionTime":"2025-12-05T01:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.776203 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.776260 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.776273 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.776289 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.776321 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:26Z","lastTransitionTime":"2025-12-05T01:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.878683 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.878729 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.878741 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.878757 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.878768 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:26Z","lastTransitionTime":"2025-12-05T01:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.888609 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.888649 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.888667 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.888686 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.888698 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:26Z","lastTransitionTime":"2025-12-05T01:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.892708 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 01:11:26 crc kubenswrapper[4665]: E1205 01:11:26.892788 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.892911 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 01:11:26 crc kubenswrapper[4665]: E1205 01:11:26.892956 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.893122 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 01:11:26 crc kubenswrapper[4665]: E1205 01:11:26.893167 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 01:11:26 crc kubenswrapper[4665]: E1205 01:11:26.906957 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:26Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.910838 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.910862 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.910870 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.910882 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.910893 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:26Z","lastTransitionTime":"2025-12-05T01:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:26 crc kubenswrapper[4665]: E1205 01:11:26.924364 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:26Z is after 2025-08-24T17:21:41Z"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.927100 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.927125 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
event="NodeHasNoDiskPressure" Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.927136 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.927149 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.927158 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:26Z","lastTransitionTime":"2025-12-05T01:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:26 crc kubenswrapper[4665]: E1205 01:11:26.936891 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:26Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.939379 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.939405 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.939414 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.939428 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.939436 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:26Z","lastTransitionTime":"2025-12-05T01:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:26 crc kubenswrapper[4665]: E1205 01:11:26.948754 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:26Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.951406 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.951428 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.951436 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.951447 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.951455 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:26Z","lastTransitionTime":"2025-12-05T01:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:26 crc kubenswrapper[4665]: E1205 01:11:26.963386 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:26Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:26 crc kubenswrapper[4665]: E1205 01:11:26.963543 4665 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.981170 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
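Every status-update retry above fails the same way before the kubelet gives up: the patch is rejected because the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743/node serves a certificate whose notAfter (2025-08-24T17:21:41Z) is months behind the node's clock (2025-12-05). The following is a minimal diagnostic sketch, not part of the cluster tooling: run on the node, it fetches the certificate the webhook endpoint actually serves and compares its validity window against the local clock, mirroring the x509 check that fails in the log. The endpoint address is taken from the log; everything else is an assumption for illustration.

// certcheck.go - inspect the webhook's serving certificate validity window.
package main

import (
	"crypto/tls"
	"fmt"
	"os"
	"time"
)

func main() {
	// InsecureSkipVerify: we want to inspect the certificate, not trust it.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Fprintln(os.Stderr, "dial:", err)
		os.Exit(1)
	}
	defer conn.Close()

	certs := conn.ConnectionState().PeerCertificates
	if len(certs) == 0 {
		fmt.Fprintln(os.Stderr, "no peer certificate presented")
		os.Exit(1)
	}
	cert, now := certs[0], time.Now()
	fmt.Printf("subject:   %s\n", cert.Subject)
	fmt.Printf("notBefore: %s\n", cert.NotBefore.Format(time.RFC3339))
	fmt.Printf("notAfter:  %s\n", cert.NotAfter.Format(time.RFC3339))
	switch {
	case now.After(cert.NotAfter):
		// This branch corresponds to the log's "certificate has expired" error.
		fmt.Printf("EXPIRED: current time %s is after notAfter\n", now.Format(time.RFC3339))
	case now.Before(cert.NotBefore):
		fmt.Printf("NOT YET VALID: current time %s is before notBefore\n", now.Format(time.RFC3339))
	default:
		fmt.Println("certificate is within its validity window")
	}
}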
event="NodeHasSufficientMemory" Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.981197 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.981208 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.981223 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:26 crc kubenswrapper[4665]: I1205 01:11:26.981641 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:26Z","lastTransitionTime":"2025-12-05T01:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.083467 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.083522 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.083533 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.083545 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.083554 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:27Z","lastTransitionTime":"2025-12-05T01:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.186102 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.186142 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.186152 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.186168 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.186178 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:27Z","lastTransitionTime":"2025-12-05T01:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.288912 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.288940 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.288950 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.288973 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.288987 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:27Z","lastTransitionTime":"2025-12-05T01:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.391275 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.391319 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.391328 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.391340 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.391350 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:27Z","lastTransitionTime":"2025-12-05T01:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.493821 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.493870 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.493890 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.493916 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.493935 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:27Z","lastTransitionTime":"2025-12-05T01:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.596039 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.596069 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.596078 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.596092 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.596104 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:27Z","lastTransitionTime":"2025-12-05T01:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.698034 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.698104 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.698115 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.698133 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.698144 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:27Z","lastTransitionTime":"2025-12-05T01:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.800595 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.800629 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.800637 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.800655 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.800667 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:27Z","lastTransitionTime":"2025-12-05T01:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
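The condition payload the kubelet writes in every "Node became not ready" entry is plain JSON, so it can be pulled out of the log and decoded directly when post-processing runs like this one. Below is a minimal Go sketch (not part of the kubelet); the struct is ours and mirrors only the fields visible in the entries above, a subset of Kubernetes' v1.NodeCondition.

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // nodeCondition mirrors the fields visible in the kubelet's
    // condition={...} payload above.
    type nodeCondition struct {
        Type               string `json:"type"`
        Status             string `json:"status"`
        LastHeartbeatTime  string `json:"lastHeartbeatTime"`
        LastTransitionTime string `json:"lastTransitionTime"`
        Reason             string `json:"reason"`
        Message            string `json:"message"`
    }

    func main() {
        // Payload copied from the log entry at 01:11:26.981641 (message shortened).
        raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:26Z","lastTransitionTime":"2025-12-05T01:11:26Z","reason":"KubeletNotReady","message":"container runtime network not ready"}`
        var c nodeCondition
        if err := json.Unmarshal([]byte(raw), &c); err != nil {
            panic(err)
        }
        fmt.Printf("%s=%s (%s)\n", c.Type, c.Status, c.Reason)
    }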
Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.892712 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk"
Dec 05 01:11:27 crc kubenswrapper[4665]: E1205 01:11:27.892843 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8"
Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.902495 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.902525 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.902537 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.902554 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:27 crc kubenswrapper[4665]: I1205 01:11:27.902566 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:27Z","lastTransitionTime":"2025-12-05T01:11:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
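Every failure above traces back to one check: the container runtime keeps reporting NetworkReady=false until a CNI configuration file appears in /etc/kubernetes/cni/net.d/. The sketch below illustrates what that kind of directory probe amounts to; it is not CRI-O's actual implementation, just a stand-alone approximation of the condition the log keeps repeating.

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    // cniConfigPresent reports whether any CNI config file exists in dir.
    // The extensions checked here are the common CNI ones; the real runtime
    // also parses and validates the files it finds.
    func cniConfigPresent(dir string) bool {
        for _, pattern := range []string{"*.conf", "*.conflist", "*.json"} {
            matches, err := filepath.Glob(filepath.Join(dir, pattern))
            if err == nil && len(matches) > 0 {
                return true
            }
        }
        return false
    }

    func main() {
        dir := "/etc/kubernetes/cni/net.d" // path taken from the log above
        if !cniConfigPresent(dir) {
            fmt.Printf("NetworkReady=false: no CNI configuration file in %s\n", dir)
            os.Exit(1)
        }
        fmt.Println("NetworkReady=true")
    }

Once ovn-kubernetes (the network provider on this node) writes its config into that directory, the same probe flips to true and the NodeNotReady spam stops.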
[the 01:11:27.902 node-status block recurs at ~100 ms intervals, timestamps only advancing, through 01:11:28.824106]
Dec 05 01:11:28 crc kubenswrapper[4665]: I1205 01:11:28.892424 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 01:11:28 crc kubenswrapper[4665]: I1205 01:11:28.892463 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 01:11:28 crc kubenswrapper[4665]: I1205 01:11:28.892500 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 01:11:28 crc kubenswrapper[4665]: E1205 01:11:28.892622 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:11:28 crc kubenswrapper[4665]: E1205 01:11:28.892682 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:11:28 crc kubenswrapper[4665]: E1205 01:11:28.892752 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:11:28 crc kubenswrapper[4665]: I1205 01:11:28.925983 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:28 crc kubenswrapper[4665]: I1205 01:11:28.926010 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:28 crc kubenswrapper[4665]: I1205 01:11:28.926020 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:28 crc kubenswrapper[4665]: I1205 01:11:28.926034 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:28 crc kubenswrapper[4665]: I1205 01:11:28.926044 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:28Z","lastTransitionTime":"2025-12-05T01:11:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:29 crc kubenswrapper[4665]: I1205 01:11:29.027938 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:29 crc kubenswrapper[4665]: I1205 01:11:29.027982 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:29 crc kubenswrapper[4665]: I1205 01:11:29.027995 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:29 crc kubenswrapper[4665]: I1205 01:11:29.028015 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:29 crc kubenswrapper[4665]: I1205 01:11:29.028028 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:29Z","lastTransitionTime":"2025-12-05T01:11:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
[the 01:11:28.925 node-status block recurs at ~100 ms intervals, timestamps only advancing, through 01:11:29.744265]
Dec 05 01:11:29 crc kubenswrapper[4665]: I1205 01:11:29.833965 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs\") pod \"network-metrics-daemon-xhbdk\" (UID: \"e1e639aa-4bf7-4baa-a332-62dffec786d8\") " pod="openshift-multus/network-metrics-daemon-xhbdk"
Dec 05 01:11:29 crc kubenswrapper[4665]: E1205 01:11:29.834117 4665 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 01:11:29 crc kubenswrapper[4665]: E1205 01:11:29.834182 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs podName:e1e639aa-4bf7-4baa-a332-62dffec786d8 nodeName:}" failed. No retries permitted until 2025-12-05 01:12:01.83416504 +0000 UTC m=+97.173557339 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs") pod "network-metrics-daemon-xhbdk" (UID: "e1e639aa-4bf7-4baa-a332-62dffec786d8") : object "openshift-multus"/"metrics-daemon-secret" not registered
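The "durationBeforeRetry 32s" above is consistent with an exponential backoff that starts small and doubles on each failed mount attempt (500 ms, 1 s, 2 s, ... 32 s would make this the seventh failure). The constants in this sketch are assumptions about kubelet internals, shown only to make the retry schedule concrete; the cap value in particular is not confirmed by this log.

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // Assumed: initial 500 ms delay, doubling per failure, capped.
        delay := 500 * time.Millisecond
        maxDelay := 2*time.Minute + 2*time.Second
        for attempt := 1; attempt <= 10; attempt++ {
            fmt.Printf("failure %2d: durationBeforeRetry=%s\n", attempt, delay)
            delay *= 2
            if delay > maxDelay {
                delay = maxDelay
            }
        }
    }

Under those assumptions the printout reaches 32s at failure 7, matching the entry above, and the "No retries permitted until 01:12:01" timestamp is simply the failure time plus that delay.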
Dec 05 01:11:29 crc kubenswrapper[4665]: I1205 01:11:29.846137 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:29 crc kubenswrapper[4665]: I1205 01:11:29.846168 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:29 crc kubenswrapper[4665]: I1205 01:11:29.846177 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:29 crc kubenswrapper[4665]: I1205 01:11:29.846194 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:29 crc kubenswrapper[4665]: I1205 01:11:29.846203 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:29Z","lastTransitionTime":"2025-12-05T01:11:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:29 crc kubenswrapper[4665]: I1205 01:11:29.892682 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk"
Dec 05 01:11:29 crc kubenswrapper[4665]: E1205 01:11:29.892798 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8"
Dec 05 01:11:29 crc kubenswrapper[4665]: I1205 01:11:29.948808 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:29 crc kubenswrapper[4665]: I1205 01:11:29.948853 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:29 crc kubenswrapper[4665]: I1205 01:11:29.948866 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:29 crc kubenswrapper[4665]: I1205 01:11:29.948885 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:29 crc kubenswrapper[4665]: I1205 01:11:29.948902 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:29Z","lastTransitionTime":"2025-12-05T01:11:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
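Note that "object \"openshift-multus\"/\"metrics-daemon-secret\" not registered" means the kubelet's secret manager has no registration for that secret yet (the pod's volumes were seen before the manager caught up); it does not necessarily mean the secret is missing from the API server. A quick client-go check can distinguish the two; this is a diagnostic sketch, and the kubeconfig path is an assumption for this CRC node.

    package main

    import (
        "context"
        "fmt"

        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        // Assumed kubeconfig location; adjust for the environment at hand.
        cfg, err := clientcmd.BuildConfigFromFlags("", "/root/.kube/config")
        if err != nil {
            panic(err)
        }
        cs, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        // Ask the API server directly for the secret the kubelet reports
        // as "not registered".
        _, err = cs.CoreV1().Secrets("openshift-multus").
            Get(context.TODO(), "metrics-daemon-secret", metav1.GetOptions{})
        fmt.Println("get secret err:", err)
    }

If this prints a nil error, the secret exists and the kubelet-side registration simply lags; a NotFound error would point at the secret itself.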
[the 01:11:29.948 node-status block recurs at ~100 ms intervals, timestamps only advancing, through 01:11:30.871437]
Dec 05 01:11:30 crc kubenswrapper[4665]: I1205 01:11:30.893261 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 01:11:30 crc kubenswrapper[4665]: I1205 01:11:30.893337 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 01:11:30 crc kubenswrapper[4665]: I1205 01:11:30.893541 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 01:11:30 crc kubenswrapper[4665]: E1205 01:11:30.893694 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 01:11:30 crc kubenswrapper[4665]: E1205 01:11:30.893775 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 01:11:30 crc kubenswrapper[4665]: E1205 01:11:30.893981 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 01:11:30 crc kubenswrapper[4665]: I1205 01:11:30.905571 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"]
Dec 05 01:11:30 crc kubenswrapper[4665]: I1205 01:11:30.974278 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:30 crc kubenswrapper[4665]: I1205 01:11:30.974338 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:30 crc kubenswrapper[4665]: I1205 01:11:30.974354 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:30 crc kubenswrapper[4665]: I1205 01:11:30.974370 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:30 crc kubenswrapper[4665]: I1205 01:11:30.974380 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:30Z","lastTransitionTime":"2025-12-05T01:11:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
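The SyncLoop ADD for kube-rbac-proxy-crio-crc goes through while every pod that needs the cluster network is skipped: pods running in the host's network namespace are not gated on NetworkReady. The sketch below illustrates that gating only; the struct is a stand-in for the real v1.Pod, and the HostNetwork values assigned here are illustrative assumptions, not read from the cluster.

    package main

    import "fmt"

    // podSpec is a stand-in for the fields of v1.Pod relevant to the gate.
    type podSpec struct {
        Name        string
        HostNetwork bool
    }

    // canSync mirrors the behavior visible in the log: when the runtime
    // network is not ready, only host-network pods may proceed.
    func canSync(networkReady bool, p podSpec) bool {
        return networkReady || p.HostNetwork
    }

    func main() {
        pods := []podSpec{
            // Assumed host-network pod (it is admitted above while the node is NotReady).
            {Name: "openshift-machine-config-operator/kube-rbac-proxy-crio-crc", HostNetwork: true},
            // Pod-network pod; matches the "Error syncing pod, skipping" entries.
            {Name: "openshift-multus/network-metrics-daemon-xhbdk", HostNetwork: false},
        }
        for _, p := range pods {
            fmt.Printf("%s canSync=%v\n", p.Name, canSync(false, p))
        }
    }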
[the 01:11:30.974 node-status block recurs at ~100 ms intervals, timestamps only advancing, through 01:11:31.178732]
Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.281274 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.281318 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.281326 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.281340 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.281349 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:31Z","lastTransitionTime":"2025-12-05T01:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
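The status-manager failures recorded below all die on the same TLS handshake: the network-node-identity webhook at 127.0.0.1:9743 is serving a certificate that expired on 2025-08-24, months before the node's current clock of 2025-12-05. A small diagnostic sketch to confirm that from the node follows; InsecureSkipVerify is deliberate here so the handshake completes and the validity window can be read, and the endpoint is taken from the log entries below.

    package main

    import (
        "crypto/tls"
        "fmt"
        "time"
    )

    func main() {
        // Dial the webhook endpoint the kubelet reports and inspect its cert.
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            panic(err)
        }
        defer conn.Close()
        cert := conn.ConnectionState().PeerCertificates[0]
        fmt.Printf("NotBefore=%s NotAfter=%s expired=%v\n",
            cert.NotBefore, cert.NotAfter, time.Now().After(cert.NotAfter))
    }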
Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.284700 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j22m9_be34b4a6-0156-4e21-bae6-12af18583b0d/kube-multus/0.log"
Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.284752 4665 generic.go:334] "Generic (PLEG): container finished" podID="be34b4a6-0156-4e21-bae6-12af18583b0d" containerID="84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954" exitCode=1
Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.284791 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j22m9" event={"ID":"be34b4a6-0156-4e21-bae6-12af18583b0d","Type":"ContainerDied","Data":"84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954"}
Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.285453 4665 scope.go:117] "RemoveContainer" containerID="84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954"
Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.299048 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/en
v\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:31Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.309691 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:31Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.321034 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:31Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:11:30Z\\\",\\\"message\\\":\\\"2025-12-05T01:10:45+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2c5f21c9-68cf-4c1e-aa8c-b48fff93d364\\\\n2025-12-05T01:10:45+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2c5f21c9-68cf-4c1e-aa8c-b48fff93d364 to /host/opt/cni/bin/\\\\n2025-12-05T01:10:45Z [verbose] multus-daemon 
started\\\\n2025-12-05T01:10:45Z [verbose] Readiness Indicator file check\\\\n2025-12-05T01:11:30Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:31Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.336516 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:31Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.349577 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:31Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.361919 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"889d53ba-f6b3-4293-8565-01740293dfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ba79066d653d7bb787b2e25c4814d52630cb5b93d5a64706a7ed4438db92204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b526f85582e7a9dd8a7cd3ecbcf11caca2a83cd5b9a9e3f70cc3c7ee9680e7d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02e1517bd35ee9ac7c95a0aa67ccd5d85fbcb63c1cd17c3acd10a493649d4629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2504ed26a239c4f11d977aad9a4cc0ef5c9a7eefee20790a21dd013288a06a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c2504ed26a239c4f11d977aad9a4cc0ef5c9a7eefee20790a21dd013288a06a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:31Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.372898 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:31Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.383492 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.383519 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.383529 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.383550 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.383560 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:31Z","lastTransitionTime":"2025-12-05T01:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.383737 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:31Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.394244 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:31Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.404264 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-05T01:11:31Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.413617 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:31Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.422643 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e233784-fe06-4bef-a30e-29d8dca2f91d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9aafbd36430c36f8d23c7ea72a4b97080a503c7f79c113d1861ed80c2140c2e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944b0c438e2c4d4babd4e351233dd36f74a2e322de0895ac47454bfb37d137dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\
\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7xnrl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:31Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.432651 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:31Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.443010 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xhbdk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1e639aa-4bf7-4baa-a332-62dffec786d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xhbdk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:31Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.452531 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9919ba70-c8d1-4829-bc34-f3aa8266abe8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a93e520899a2846dc0cefe3c7202139908be046101bd7b7b57c4495aa70f3664\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714dad76d51c3355cc0a4de1749bf3e960d0eefd2bcb82cf264981941c1fc754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://714dad76d51c3355cc0a4de1749bf3e960d0eefd2bcb82cf264981941c1fc754\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:31Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.466357 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:31Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.477817 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:31Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.486093 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.486130 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.486142 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.486157 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.486170 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:31Z","lastTransitionTime":"2025-12-05T01:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.509342 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:11:09Z\\\",\\\"message\\\":\\\"enshift-image-registry/image-registry\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-image-registry/image-registry_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 01:11:08.951199 6194 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1205 01:11:08.951202 6194 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type *v1.Pod\\\\nF1205 01:11:08.951251 6194 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:11:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-2bmn9_openshift-ovn-kubernetes(76af84f2-4935-4e7f-8fc6-b51adcfeebc4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:31Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.556570 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c3
03953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:31Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.588118 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.588152 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.588161 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.588174 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.588183 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:31Z","lastTransitionTime":"2025-12-05T01:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.690630 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.690666 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.690674 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.690688 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.690698 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:31Z","lastTransitionTime":"2025-12-05T01:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.792398 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.792436 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.792445 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.792478 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.792487 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:31Z","lastTransitionTime":"2025-12-05T01:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.892648 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:31 crc kubenswrapper[4665]: E1205 01:11:31.892772 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.894407 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.894453 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.894463 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.894477 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.894488 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:31Z","lastTransitionTime":"2025-12-05T01:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.997004 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.997033 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.997042 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.997056 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:31 crc kubenswrapper[4665]: I1205 01:11:31.997067 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:31Z","lastTransitionTime":"2025-12-05T01:11:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.100034 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.100079 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.100088 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.100103 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.100112 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:32Z","lastTransitionTime":"2025-12-05T01:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.203221 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.203264 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.203274 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.203288 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.203316 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:32Z","lastTransitionTime":"2025-12-05T01:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.290076 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j22m9_be34b4a6-0156-4e21-bae6-12af18583b0d/kube-multus/0.log" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.290138 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j22m9" event={"ID":"be34b4a6-0156-4e21-bae6-12af18583b0d","Type":"ContainerStarted","Data":"064bac400c3e094a4e3864b57ff11e28c7c0b35c66a359a07995c9d42ba7c068"} Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.304661 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\
\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:32Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.307351 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.307387 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.307398 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.307412 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.307422 4665 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:32Z","lastTransitionTime":"2025-12-05T01:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.317682 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"889d53ba-f6b3-4293-8565-01740293dfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ba79066d653d7bb787b2e25c4814d52630cb5b93d5a64706a7ed4438db92204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b526f85582e7a9dd8a7cd3ecbcf11caca2a83cd5b9a9e3f70cc3c7ee9680e7d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02e1517bd35ee9ac7c95a0aa67ccd5d85fbcb63c1cd17c3acd10a493649d4629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2504ed26a239c4f11d977aad9a4cc0ef5c9a7eefee20790a21dd013288a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c2504ed26a239c4f11d977aad9a4cc0ef5c9a7eefee20790a21dd013288a06a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:32Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.330902 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:32Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.343894 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:32Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.359208 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:32Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.371217 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:32Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.384007 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://064bac400c3e094a4e3864b57ff11e28c7c0b35c66a359a07995c9d42ba7c068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:11:30Z\\\",\\\"message\\\":\\\"2025-12-05T01:10:45+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2c5f21c9-68cf-4c1e-aa8c-b48fff93d364\\\\n2025-12-05T01:10:45+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2c5f21c9-68cf-4c1e-aa8c-b48fff93d364 to /host/opt/cni/bin/\\\\n2025-12-05T01:10:45Z [verbose] multus-daemon started\\\\n2025-12-05T01:10:45Z [verbose] Readiness Indicator file check\\\\n2025-12-05T01:11:30Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:11:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:32Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.398210 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:32Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.408755 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.408780 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:32 crc 
kubenswrapper[4665]: I1205 01:11:32.408789 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.408801 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.408809 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:32Z","lastTransitionTime":"2025-12-05T01:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.410159 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"con
tainerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:32Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.420723 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:32Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.430363 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:32Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.441162 4665 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e233784-fe06-4bef-a30e-29d8dca2f91d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9aafbd36430c36f8d23c7ea72a4b97080a503c7f79c113d1861ed80c2140c2e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944b0c438e2c4d4babd4e351233dd36f74a2e322de0895ac47454bfb37d137dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7xnrl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:32Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.450347 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9919ba70-c8d1-4829-bc34-f3aa8266abe8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a93e520899a2846dc0cefe3c7202139908be046101bd7b7b57c4495aa70f3664\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714dad76d51c3355cc0a4de1749bf3e960d0eefd2bcb82cf264981941c1fc754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://714dad76d51c3355cc0a4de1749bf3e960d0eefd2bcb82cf264981941c1fc754\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:32Z is after 
2025-08-24T17:21:41Z" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.463816 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:32Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.473865 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:32Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.490079 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:11:09Z\\\",\\\"message\\\":\\\"enshift-image-registry/image-registry\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-image-registry/image-registry_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 01:11:08.951199 6194 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1205 01:11:08.951202 6194 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type *v1.Pod\\\\nF1205 01:11:08.951251 6194 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:11:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2bmn9_openshift-ovn-kubernetes(76af84f2-4935-4e7f-8fc6-b51adcfeebc4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:32Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.501521 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:32Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.509648 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xhbdk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1e639aa-4bf7-4baa-a332-62dffec786d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-xhbdk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:32Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.510274 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.510310 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.510318 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.510331 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.510339 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:32Z","lastTransitionTime":"2025-12-05T01:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.527863 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\
\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c303953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521
f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:32Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.612865 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.612901 4665 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.612975 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.612992 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.613002 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:32Z","lastTransitionTime":"2025-12-05T01:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.715148 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.715185 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.715193 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.715206 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.715214 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:32Z","lastTransitionTime":"2025-12-05T01:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.817470 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.817503 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.817512 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.817524 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.817533 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:32Z","lastTransitionTime":"2025-12-05T01:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.893225 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:32 crc kubenswrapper[4665]: E1205 01:11:32.893362 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.893532 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:32 crc kubenswrapper[4665]: E1205 01:11:32.893581 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.893680 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:32 crc kubenswrapper[4665]: E1205 01:11:32.893726 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.919056 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.919081 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.919090 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.919103 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:32 crc kubenswrapper[4665]: I1205 01:11:32.919112 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:32Z","lastTransitionTime":"2025-12-05T01:11:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.021577 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.021637 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.021663 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.021691 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.021713 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:33Z","lastTransitionTime":"2025-12-05T01:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.124416 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.124454 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.124462 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.124476 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.124486 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:33Z","lastTransitionTime":"2025-12-05T01:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.226820 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.226858 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.226869 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.226886 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.226897 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:33Z","lastTransitionTime":"2025-12-05T01:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.328831 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.328881 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.328899 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.328924 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.328941 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:33Z","lastTransitionTime":"2025-12-05T01:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.431744 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.431791 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.431805 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.431823 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.431834 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:33Z","lastTransitionTime":"2025-12-05T01:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.534456 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.534495 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.534506 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.534522 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.534531 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:33Z","lastTransitionTime":"2025-12-05T01:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.636571 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.636625 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.636634 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.636674 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.636683 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:33Z","lastTransitionTime":"2025-12-05T01:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.739775 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.739845 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.739855 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.739871 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.739881 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:33Z","lastTransitionTime":"2025-12-05T01:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.842463 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.842511 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.842521 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.842535 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.842585 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:33Z","lastTransitionTime":"2025-12-05T01:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.896469 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:33 crc kubenswrapper[4665]: E1205 01:11:33.897507 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.945142 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.945177 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.945187 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.945202 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:33 crc kubenswrapper[4665]: I1205 01:11:33.945211 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:33Z","lastTransitionTime":"2025-12-05T01:11:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.047333 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.047371 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.047384 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.047401 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.047412 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:34Z","lastTransitionTime":"2025-12-05T01:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.150141 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.150203 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.150216 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.150231 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.150243 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:34Z","lastTransitionTime":"2025-12-05T01:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.252928 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.252966 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.252975 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.252988 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.252997 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:34Z","lastTransitionTime":"2025-12-05T01:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.355719 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.355762 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.355772 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.355786 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.355794 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:34Z","lastTransitionTime":"2025-12-05T01:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.458413 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.458491 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.458508 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.458928 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.458994 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:34Z","lastTransitionTime":"2025-12-05T01:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.562184 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.562245 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.562261 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.562282 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.562320 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:34Z","lastTransitionTime":"2025-12-05T01:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.665157 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.665196 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.665206 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.665224 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.665237 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:34Z","lastTransitionTime":"2025-12-05T01:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.767322 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.767357 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.767366 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.767381 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.767392 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:34Z","lastTransitionTime":"2025-12-05T01:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.869604 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.869633 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.869641 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.869657 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.869668 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:34Z","lastTransitionTime":"2025-12-05T01:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.893201 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:34 crc kubenswrapper[4665]: E1205 01:11:34.893286 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.893453 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:34 crc kubenswrapper[4665]: E1205 01:11:34.893506 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.894345 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:34 crc kubenswrapper[4665]: E1205 01:11:34.894402 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.906760 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"r
eady\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:34Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.918596 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:34Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.929540 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://064bac400c3e094a4e3864b57ff11e28c7c0b35c66a359a07995c9d42ba7c068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:11:30Z\\\",\\\"message\\\":\\\"2025-12-05T01:10:45+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2c5f21c9-68cf-4c1e-aa8c-b48fff93d364\\\\n2025-12-05T01:10:45+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2c5f21c9-68cf-4c1e-aa8c-b48fff93d364 to /host/opt/cni/bin/\\\\n2025-12-05T01:10:45Z [verbose] multus-daemon started\\\\n2025-12-05T01:10:45Z [verbose] Readiness Indicator file check\\\\n2025-12-05T01:11:30Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:11:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:34Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.942391 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:34Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.954794 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:34Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.964583 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"889d53ba-f6b3-4293-8565-01740293dfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ba79066d653d7bb787b2e25c4814d52630cb5b93d5a64706a7ed4438db92204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b526f85582e7a9dd8a7cd3ecbcf11caca2a83cd5b9a9e3f70cc3c7ee9680e7d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02e1517bd35ee9ac7c95a0aa67ccd5d85fbcb63c1cd17c3acd10a493649d4629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2504ed26a239c4f11d977aad9a4cc0ef5c9a7eefee20790a21dd013288a06a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c2504ed26a239c4f11d977aad9a4cc0ef5c9a7eefee20790a21dd013288a06a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:34Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.971645 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.971665 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.971674 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.971687 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.971696 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:34Z","lastTransitionTime":"2025-12-05T01:11:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.974788 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:34Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.984364 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:34Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:34 crc kubenswrapper[4665]: I1205 01:11:34.994609 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1
220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:34Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.004920 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:35Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.014829 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:35Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.026870 4665 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e233784-fe06-4bef-a30e-29d8dca2f91d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9aafbd36430c36f8d23c7ea72a4b97080a503c7f79c113d1861ed80c2140c2e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944b0c438e2c4d4babd4e351233dd36f74a2e322de0895ac47454bfb37d137dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7xnrl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:35Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.037891 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:35Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.050852 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xhbdk" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1e639aa-4bf7-4baa-a332-62dffec786d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xhbdk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:35Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.063192 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9919ba70-c8d1-4829-bc34-f3aa8266abe8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a93e520899a2846dc0cefe3c7202139908be046101bd7b7b57c4495aa70f3664\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714dad76d51c3355cc0a4de1749bf3e960d0eefd2bcb82cf264981941c1fc754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://714dad76d51c3355cc0a4de1749bf3e960d0eefd2bcb82cf264981941c1fc754\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:35Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.074261 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.074320 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.074329 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.074342 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.074352 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:35Z","lastTransitionTime":"2025-12-05T01:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.077172 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:35Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.089214 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:35Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.110727 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:11:09Z\\\",\\\"message\\\":\\\"enshift-image-registry/image-registry\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-image-registry/image-registry_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 01:11:08.951199 6194 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1205 01:11:08.951202 6194 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type *v1.Pod\\\\nF1205 01:11:08.951251 6194 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:11:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2bmn9_openshift-ovn-kubernetes(76af84f2-4935-4e7f-8fc6-b51adcfeebc4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:35Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.130149 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f83
94d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c303953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"
quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:35Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.177338 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.177937 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:35 crc 
kubenswrapper[4665]: I1205 01:11:35.178034 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.178119 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.178221 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:35Z","lastTransitionTime":"2025-12-05T01:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.281309 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.281344 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.281353 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.281369 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.281378 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:35Z","lastTransitionTime":"2025-12-05T01:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.383661 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.383704 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.383715 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.383731 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.383742 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:35Z","lastTransitionTime":"2025-12-05T01:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.486019 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.486051 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.486059 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.486073 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.486083 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:35Z","lastTransitionTime":"2025-12-05T01:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.588266 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.588357 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.588373 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.588392 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.588405 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:35Z","lastTransitionTime":"2025-12-05T01:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.691325 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.691360 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.691369 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.691383 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.691392 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:35Z","lastTransitionTime":"2025-12-05T01:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.793550 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.793587 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.793598 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.793615 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.793625 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:35Z","lastTransitionTime":"2025-12-05T01:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.893330 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:35 crc kubenswrapper[4665]: E1205 01:11:35.893737 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.895808 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.895839 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.895888 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.895907 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.895919 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:35Z","lastTransitionTime":"2025-12-05T01:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.998127 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.998151 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.998159 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.998171 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:35 crc kubenswrapper[4665]: I1205 01:11:35.998179 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:35Z","lastTransitionTime":"2025-12-05T01:11:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.101096 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.101188 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.101205 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.101227 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.101244 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:36Z","lastTransitionTime":"2025-12-05T01:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.203162 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.203217 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.203234 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.203267 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.203284 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:36Z","lastTransitionTime":"2025-12-05T01:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.305967 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.306117 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.306139 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.306205 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.306228 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:36Z","lastTransitionTime":"2025-12-05T01:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.408335 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.408364 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.408372 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.408386 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.408395 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:36Z","lastTransitionTime":"2025-12-05T01:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.510687 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.510741 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.510757 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.510780 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.510801 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:36Z","lastTransitionTime":"2025-12-05T01:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.612683 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.612719 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.612728 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.612742 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.612751 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:36Z","lastTransitionTime":"2025-12-05T01:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.715167 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.715221 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.715234 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.715254 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.715283 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:36Z","lastTransitionTime":"2025-12-05T01:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.817928 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.817970 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.817981 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.817995 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.818006 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:36Z","lastTransitionTime":"2025-12-05T01:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.892879 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.892879 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.892893 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:36 crc kubenswrapper[4665]: E1205 01:11:36.893469 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.893525 4665 scope.go:117] "RemoveContainer" containerID="37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6" Dec 05 01:11:36 crc kubenswrapper[4665]: E1205 01:11:36.893518 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:11:36 crc kubenswrapper[4665]: E1205 01:11:36.893712 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.919826 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.919854 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.919879 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.919891 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:36 crc kubenswrapper[4665]: I1205 01:11:36.919901 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:36Z","lastTransitionTime":"2025-12-05T01:11:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.022093 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.022124 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.022132 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.022145 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.022155 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:37Z","lastTransitionTime":"2025-12-05T01:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.049159 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.049205 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.049219 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.049239 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.049251 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:37Z","lastTransitionTime":"2025-12-05T01:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:37 crc kubenswrapper[4665]: E1205 01:11:37.060955 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:37Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.065070 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.065105 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.065117 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.065132 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.065142 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:37Z","lastTransitionTime":"2025-12-05T01:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:37 crc kubenswrapper[4665]: E1205 01:11:37.081619 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:37Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.084493 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.084534 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.084544 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.084558 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.084568 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:37Z","lastTransitionTime":"2025-12-05T01:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:37 crc kubenswrapper[4665]: E1205 01:11:37.097053 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:37Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.103796 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.103853 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.103865 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.103881 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.103891 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:37Z","lastTransitionTime":"2025-12-05T01:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:37 crc kubenswrapper[4665]: E1205 01:11:37.115332 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:37Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.118246 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.118280 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.118314 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.118334 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.118347 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:37Z","lastTransitionTime":"2025-12-05T01:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:37 crc kubenswrapper[4665]: E1205 01:11:37.131927 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:37Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:37Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:37 crc kubenswrapper[4665]: E1205 01:11:37.132090 4665 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.133494 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.133529 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.133544 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.133563 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.133576 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:37Z","lastTransitionTime":"2025-12-05T01:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.236555 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.236608 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.236623 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.236647 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.236659 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:37Z","lastTransitionTime":"2025-12-05T01:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.311277 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bmn9_76af84f2-4935-4e7f-8fc6-b51adcfeebc4/ovnkube-controller/2.log" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.341158 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.341244 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.341318 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.341343 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.341356 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:37Z","lastTransitionTime":"2025-12-05T01:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.444093 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.444137 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.444149 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.444167 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.444182 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:37Z","lastTransitionTime":"2025-12-05T01:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.546765 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.546796 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.546805 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.546818 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.546828 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:37Z","lastTransitionTime":"2025-12-05T01:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.649469 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.649503 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.649510 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.649524 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.649536 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:37Z","lastTransitionTime":"2025-12-05T01:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.752244 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.752274 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.752306 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.752320 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.752328 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:37Z","lastTransitionTime":"2025-12-05T01:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.856858 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.856912 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.856964 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.856989 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.857045 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:37Z","lastTransitionTime":"2025-12-05T01:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.893409 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:37 crc kubenswrapper[4665]: E1205 01:11:37.893589 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.959454 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.959482 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.959490 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.959502 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:37 crc kubenswrapper[4665]: I1205 01:11:37.959510 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:37Z","lastTransitionTime":"2025-12-05T01:11:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.061466 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.061494 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.061503 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.061515 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.061523 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:38Z","lastTransitionTime":"2025-12-05T01:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.163283 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.163328 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.163338 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.163350 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.163358 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:38Z","lastTransitionTime":"2025-12-05T01:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.265467 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.265515 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.265554 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.265576 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.265593 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:38Z","lastTransitionTime":"2025-12-05T01:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.322728 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bmn9_76af84f2-4935-4e7f-8fc6-b51adcfeebc4/ovnkube-controller/2.log" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.330171 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerStarted","Data":"56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e"} Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.330950 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.357617 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-cer
ts\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c303953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:38Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.367427 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.367444 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.367451 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.367486 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.367496 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:38Z","lastTransitionTime":"2025-12-05T01:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin 
returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.375957 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://064bac400c3e094a4e3864b57ff11e28c7c0b35c66a359a07995c9d42ba7c068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:11:30Z\\\",\\\"message\\\":\\\"2025-12-05T01:10:45+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2c5f21c9-68cf-4c1e-aa8c-b48fff93d364\\\\n2025-12-05T01:10:45+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2c5f21c9-68cf-4c1e-aa8c-b48fff93d364 to /host/opt/cni/bin/\\\\n2025-12-05T01:10:45Z [verbose] multus-daemon started\\\\n2025-12-05T01:10:45Z [verbose] Readiness Indicator file check\\\\n2025-12-05T01:11:30Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:11:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:38Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.392196 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:38Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.409080 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:38Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.421255 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"889d53ba-f6b3-4293-8565-01740293dfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ba79066d653d7bb787b2e25c4814d52630cb5b93d5a64706a7ed4438db92204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b526f85582e7a9dd8a7cd3ecbcf11caca2a83cd5b9a9e3f70cc3c7ee9680e7d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02e1517bd35ee9ac7c95a0aa67ccd5d85fbcb63c1cd17c3acd10a493649d4629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2504ed26a239c4f11d977aad9a4cc0ef5c9a7eefee20790a21dd013288a06a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c2504ed26a239c4f11d977aad9a4cc0ef5c9a7eefee20790a21dd013288a06a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:38Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.434655 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:38Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.448162 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:38Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.460982 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:38Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.469657 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.469784 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.469863 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.469980 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.470158 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:38Z","lastTransitionTime":"2025-12-05T01:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.470891 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:38Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.483096 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:38Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.494718 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:38Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.538347 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:38Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.551373 4665 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e233784-fe06-4bef-a30e-29d8dca2f91d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9aafbd36430c36f8d23c7ea72a4b97080a503c7f79c113d1861ed80c2140c2e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944b0c438e2c4d4babd4e351233dd36f74a2e322de0895ac47454bfb37d137dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7xnrl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:38Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.565941 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9919ba70-c8d1-4829-bc34-f3aa8266abe8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a93e520899a2846dc0cefe3c7202139908be046101bd7b7b57c4495aa70f3664\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714dad76d51c3355cc0a4de1749bf3e960d0eefd2bcb82cf264981941c1fc754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://714dad76d51c3355cc0a4de1749bf3e960d0eefd2bcb82cf264981941c1fc754\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:38Z is after 
2025-08-24T17:21:41Z" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.573436 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.573495 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.573508 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.573532 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.573555 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:38Z","lastTransitionTime":"2025-12-05T01:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.585170 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:38Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:38 
crc kubenswrapper[4665]: I1205 01:11:38.597910 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:38Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.618935 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:11:09Z\\\",\\\"message\\\":\\\"enshift-image-registry/image-registry\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-image-registry/image-registry_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 01:11:08.951199 6194 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1205 01:11:08.951202 6194 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type *v1.Pod\\\\nF1205 01:11:08.951251 6194 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to 
create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:11:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\
"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:38Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.632682 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:38Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.647801 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xhbdk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1e639aa-4bf7-4baa-a332-62dffec786d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xhbdk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:38Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.677137 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.677364 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.677471 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.677607 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.677710 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:38Z","lastTransitionTime":"2025-12-05T01:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.780130 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.780184 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.780196 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.780218 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.780242 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:38Z","lastTransitionTime":"2025-12-05T01:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.883641 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.884064 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.884253 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.884485 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.884852 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:38Z","lastTransitionTime":"2025-12-05T01:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.892885 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.892886 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:38 crc kubenswrapper[4665]: E1205 01:11:38.893234 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
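
Every status patch in the entries above is rejected for the same reason: the network-node-identity webhook at 127.0.0.1:9743 serves a certificate that expired on 2025-08-24, long before the current clock of 2025-12-05. A minimal Go sketch (not part of the log; the address and dates are taken from the entries above) that dials the endpoint and prints the certificate's validity window, reproducing the x509 verdict independently of kubelet:

    // certcheck.go - print the validity window of the TLS certificate
    // served at the webhook address reported in the log above.
    package main

    import (
        "crypto/tls"
        "fmt"
        "time"
    )

    func main() {
        addr := "127.0.0.1:9743" // webhook endpoint from the log entries above
        // Skip chain verification: we want to inspect the certificate even
        // though it is expired, which is exactly what verification rejects.
        conn, err := tls.Dial("tcp", addr, &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            fmt.Println("dial:", err)
            return
        }
        defer conn.Close()
        cert := conn.ConnectionState().PeerCertificates[0]
        fmt.Println("subject:  ", cert.Subject)
        fmt.Println("notBefore:", cert.NotBefore)
        fmt.Println("notAfter: ", cert.NotAfter)
        if time.Now().After(cert.NotAfter) {
            fmt.Println("certificate is EXPIRED") // the x509 condition logged above
        }
    }
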
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:11:38 crc kubenswrapper[4665]: E1205 01:11:38.893425 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.892999 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:38 crc kubenswrapper[4665]: E1205 01:11:38.893785 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.988216 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.988525 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.988619 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.988752 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:38 crc kubenswrapper[4665]: I1205 01:11:38.988831 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:38Z","lastTransitionTime":"2025-12-05T01:11:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.090647 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.090685 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.090693 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.090706 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.090715 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:39Z","lastTransitionTime":"2025-12-05T01:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.193595 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.193635 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.193643 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.193659 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.193668 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:39Z","lastTransitionTime":"2025-12-05T01:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.296080 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.296118 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.296127 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.296140 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.296150 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:39Z","lastTransitionTime":"2025-12-05T01:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.399260 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.399313 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.399326 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.399341 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.399352 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:39Z","lastTransitionTime":"2025-12-05T01:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.501639 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.501702 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.501724 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.501755 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.501777 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:39Z","lastTransitionTime":"2025-12-05T01:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.603785 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.603840 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.603860 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.603887 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.603909 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:39Z","lastTransitionTime":"2025-12-05T01:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.706423 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.706445 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.706453 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.706465 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.706473 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:39Z","lastTransitionTime":"2025-12-05T01:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.809017 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.809082 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.809104 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.809131 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.809153 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:39Z","lastTransitionTime":"2025-12-05T01:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.893115 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:39 crc kubenswrapper[4665]: E1205 01:11:39.893230 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
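
The NodeNotReady condition repeating once per status sweep above comes from kubelet's network-readiness check: no CNI configuration file exists yet in /etc/kubernetes/cni/net.d/ because ovnkube-node has not written one. A minimal Go sketch approximating that check (the accepted extensions follow the reference CNI library and are an assumption here, as is the standalone form of the check):

    // cnicheck.go - approximate kubelet's failing readiness test above:
    // is there any CNI network config in the conf directory?
    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    func main() {
        dir := "/etc/kubernetes/cni/net.d" // path from the log entries above
        entries, err := os.ReadDir(dir)
        if err != nil {
            fmt.Println("read dir:", err)
            return
        }
        found := false
        for _, e := range entries {
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json": // extensions the CNI library loads
                fmt.Println("found CNI config:", e.Name())
                found = true
            }
        }
        if !found {
            fmt.Println("no CNI configuration file in", dir) // the condition kubelet reports
        }
    }
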
pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.910975 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.911006 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.911014 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.911025 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:39 crc kubenswrapper[4665]: I1205 01:11:39.911033 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:39Z","lastTransitionTime":"2025-12-05T01:11:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.013243 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.013345 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.013364 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.013390 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.013411 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:40Z","lastTransitionTime":"2025-12-05T01:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.116762 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.116807 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.116818 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.116834 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.116843 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:40Z","lastTransitionTime":"2025-12-05T01:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.219427 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.219457 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.219465 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.219479 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.219488 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:40Z","lastTransitionTime":"2025-12-05T01:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.321130 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.321180 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.321192 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.321207 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.321218 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:40Z","lastTransitionTime":"2025-12-05T01:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.423678 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.423734 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.423744 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.423760 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.423773 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:40Z","lastTransitionTime":"2025-12-05T01:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.526139 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.526180 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.526208 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.526224 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.526236 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:40Z","lastTransitionTime":"2025-12-05T01:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.628101 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.628127 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.628135 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.628153 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.628171 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:40Z","lastTransitionTime":"2025-12-05T01:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.730320 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.730371 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.730403 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.730420 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.730429 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:40Z","lastTransitionTime":"2025-12-05T01:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.832861 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.832894 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.832903 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.832915 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.832923 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:40Z","lastTransitionTime":"2025-12-05T01:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.892647 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.892721 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.892721 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:40 crc kubenswrapper[4665]: E1205 01:11:40.892824 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
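
Meanwhile ovnkube-controller keeps crashing: the pod_workers entry below shows restartCount 3 with a 40s back-off. That lines up with kubelet's documented CrashLoopBackOff schedule, which starts at 10s, doubles on each restart, and caps at five minutes; a toy Go sketch of the schedule (an illustration, not kubelet's actual code):

    // backoff.go - kubelet's documented CrashLoopBackOff schedule:
    // 10s initial delay, doubled per restart, capped at 5 minutes.
    // Restart 3 yields 40s, matching the "back-off 40s" entry below.
    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        delay := 10 * time.Second // documented initial back-off
        const maxDelay = 5 * time.Minute
        for restart := 1; restart <= 6; restart++ {
            fmt.Printf("restart %d -> back-off %v\n", restart, delay)
            delay *= 2
            if delay > maxDelay {
                delay = maxDelay
            }
        }
    }
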
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:11:40 crc kubenswrapper[4665]: E1205 01:11:40.892883 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:11:40 crc kubenswrapper[4665]: E1205 01:11:40.892951 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.935684 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.935754 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.935768 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.935781 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:40 crc kubenswrapper[4665]: I1205 01:11:40.935790 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:40Z","lastTransitionTime":"2025-12-05T01:11:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.038534 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.038605 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.038614 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.038628 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.038636 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:41Z","lastTransitionTime":"2025-12-05T01:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.140515 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.140541 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.140565 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.140578 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.140586 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:41Z","lastTransitionTime":"2025-12-05T01:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.243160 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.243205 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.243230 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.243273 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.243327 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:41Z","lastTransitionTime":"2025-12-05T01:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.343025 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bmn9_76af84f2-4935-4e7f-8fc6-b51adcfeebc4/ovnkube-controller/3.log" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.344284 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bmn9_76af84f2-4935-4e7f-8fc6-b51adcfeebc4/ovnkube-controller/2.log" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.347664 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.347713 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.347730 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.347755 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.347773 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:41Z","lastTransitionTime":"2025-12-05T01:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.350856 4665 generic.go:334] "Generic (PLEG): container finished" podID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerID="56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e" exitCode=1 Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.350919 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerDied","Data":"56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e"} Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.350962 4665 scope.go:117] "RemoveContainer" containerID="37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.352213 4665 scope.go:117] "RemoveContainer" containerID="56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e" Dec 05 01:11:41 crc kubenswrapper[4665]: E1205 01:11:41.352545 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2bmn9_openshift-ovn-kubernetes(76af84f2-4935-4e7f-8fc6-b51adcfeebc4)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.376481 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c3
03953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:41Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.388875 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:41Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.403188 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:41Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.416262 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:41Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.426907 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:41Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.437802 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://064bac400c3e094a4e3864b57ff11e28c7c0b35c66a359a07995c9d42ba7c068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:11:30Z\\\",\\\"message\\\":\\\"2025-12-05T01:10:45+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2c5f21c9-68cf-4c1e-aa8c-b48fff93d364\\\\n2025-12-05T01:10:45+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2c5f21c9-68cf-4c1e-aa8c-b48fff93d364 to /host/opt/cni/bin/\\\\n2025-12-05T01:10:45Z [verbose] multus-daemon started\\\\n2025-12-05T01:10:45Z [verbose] Readiness Indicator file check\\\\n2025-12-05T01:11:30Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:11:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:41Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.450900 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.450943 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.450954 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.450972 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.450985 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:41Z","lastTransitionTime":"2025-12-05T01:11:41Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.453915 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T01:11:41Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.467057 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\
\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:41Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.478668 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"889d53ba-f6b3-4293-8565-01740293dfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ba79066d653d7bb787b2e25c4814d52630cb5b93d5a64706a7ed4438db92204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b526f85582e7a9dd8a7cd3ecbcf11caca2a83cd5b9a9e3f70cc3c7ee9680e7d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02e1517bd35ee9ac7c95a0aa67ccd5d85fbcb63c1cd17c3acd10a493649d4629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2504ed26a239c4f11d977aad9a4cc0ef5c9a7eefee20790a21dd013288a06a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c2504ed26a239c4f11d977aad9a4cc0ef5c9a7eefee20790a21dd013288a06a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:41Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.492445 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:41Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.507128 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e233784-fe06-4bef-a30e-29d8dca2f91d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9aafbd36430c36f8d23c7ea72a4b97080a503c7f79c113d1861ed80c2140c2e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944b0c4
38e2c4d4babd4e351233dd36f74a2e322de0895ac47454bfb37d137dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7xnrl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:41Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.521570 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:41Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.533952 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-05T01:11:41Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.542417 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:41Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.552868 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.552906 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.552918 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.552934 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.552946 4665 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:41Z","lastTransitionTime":"2025-12-05T01:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.562531 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\
":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:11:09Z\\\",\\\"message\\\":\\\"enshift-image-registry/image-registry\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-image-registry/image-registry_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 01:11:08.951199 6194 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1205 01:11:08.951202 6194 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type 
*v1.Pod\\\\nF1205 01:11:08.951251 6194 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:11:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:11:40Z\\\",\\\"message\\\":\\\"for removal\\\\nI1205 01:11:38.628645 6551 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 01:11:38.628651 6551 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 01:11:38.628668 6551 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 01:11:38.628675 6551 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 01:11:38.628645 6551 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 01:11:38.628808 6551 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 01:11:38.629067 6551 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 01:11:38.629177 6551 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 01:11:38.629269 6551 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 01:11:38.629070 6551 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 01:11:38.629934 6551 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 01:11:38.629985 6551 factory.go:656] Stopping watch factory\\\\nI1205 01:11:38.629998 6551 handler.go:208] Removed *v1.NetworkPolicy 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:41Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.573184 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:41Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.583159 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xhbdk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1e639aa-4bf7-4baa-a332-62dffec786d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xhbdk\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:41Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.592250 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9919ba70-c8d1-4829-bc34-f3aa8266abe8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a93e520899a2846dc0cefe3c7202139908be046101bd7b7b57c4495aa70f3664\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714dad76d51c3355cc0a4de1749bf3e960d0eefd2bcb82cf264981941c1fc754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://714dad76d51c3355cc0a4de1749bf3e960d0eefd2bcb82cf264981941c1fc754\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:41Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.606243 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:41Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.655188 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.655217 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.655226 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.655239 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.655247 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:41Z","lastTransitionTime":"2025-12-05T01:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.757640 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.757676 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.757684 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.757697 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.757706 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:41Z","lastTransitionTime":"2025-12-05T01:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.861142 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.861198 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.861210 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.861288 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.861326 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:41Z","lastTransitionTime":"2025-12-05T01:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.893320 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:41 crc kubenswrapper[4665]: E1205 01:11:41.893479 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.963637 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.963690 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.963704 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.963721 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:41 crc kubenswrapper[4665]: I1205 01:11:41.963733 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:41Z","lastTransitionTime":"2025-12-05T01:11:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.066411 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.066450 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.066470 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.066485 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.066496 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:42Z","lastTransitionTime":"2025-12-05T01:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.168469 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.168511 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.168572 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.168590 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.168603 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:42Z","lastTransitionTime":"2025-12-05T01:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.271587 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.271620 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.271628 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.271642 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.271651 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:42Z","lastTransitionTime":"2025-12-05T01:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.355167 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bmn9_76af84f2-4935-4e7f-8fc6-b51adcfeebc4/ovnkube-controller/3.log" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.373802 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.373860 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.373872 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.373887 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.373897 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:42Z","lastTransitionTime":"2025-12-05T01:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.475772 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.475839 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.475850 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.475866 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.475878 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:42Z","lastTransitionTime":"2025-12-05T01:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.578201 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.578250 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.578266 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.578288 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.578326 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:42Z","lastTransitionTime":"2025-12-05T01:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.680688 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.680752 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.680776 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.680802 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.680818 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:42Z","lastTransitionTime":"2025-12-05T01:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.783376 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.783406 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.783414 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.783427 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.783436 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:42Z","lastTransitionTime":"2025-12-05T01:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.884777 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.884804 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.884812 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.884825 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.884833 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:42Z","lastTransitionTime":"2025-12-05T01:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.892478 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.892530 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:42 crc kubenswrapper[4665]: E1205 01:11:42.892590 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.892478 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:42 crc kubenswrapper[4665]: E1205 01:11:42.892685 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:11:42 crc kubenswrapper[4665]: E1205 01:11:42.892840 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.988852 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.988900 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.988910 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.988930 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:42 crc kubenswrapper[4665]: I1205 01:11:42.988942 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:42Z","lastTransitionTime":"2025-12-05T01:11:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.092203 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.092252 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.092269 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.092320 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.092337 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:43Z","lastTransitionTime":"2025-12-05T01:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.195074 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.195146 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.195160 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.195178 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.195193 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:43Z","lastTransitionTime":"2025-12-05T01:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.298242 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.298344 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.298370 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.298399 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.298422 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:43Z","lastTransitionTime":"2025-12-05T01:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.401369 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.401408 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.401422 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.401443 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.401458 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:43Z","lastTransitionTime":"2025-12-05T01:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.503978 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.504328 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.504496 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.504526 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.504540 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:43Z","lastTransitionTime":"2025-12-05T01:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.607009 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.607112 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.607134 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.607163 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.607185 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:43Z","lastTransitionTime":"2025-12-05T01:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.709071 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.709176 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.709202 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.709230 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.709251 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:43Z","lastTransitionTime":"2025-12-05T01:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.812685 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.812732 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.812748 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.812770 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.812786 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:43Z","lastTransitionTime":"2025-12-05T01:11:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:43 crc kubenswrapper[4665]: I1205 01:11:43.893157 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk"
Dec 05 01:11:43 crc kubenswrapper[4665]: E1205 01:11:43.893368 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8"
[the five-entry sequence above (NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady, "Node became not ready") repeats verbatim roughly every 100 ms at 01:11:43.915, 01:11:44.018, .120, .223, .325, .428, .531, .633, .736, and .838; only the timestamps differ, so those ten repetitions are elided]
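Every entry above traces back to one condition: the container runtime reports NetworkReady=false because no CNI configuration exists yet in /etc/kubernetes/cni/net.d/. The stand-alone Go sketch below approximates that readiness test for illustration only; the real check lives in the runtime's CNI plugin manager (libcni/ocicni), and the exact extension filter here is an assumption, not kubelet code.

// cnicheck.go - a minimal approximation of the readiness test behind the
// "no CNI configuration file in /etc/kubernetes/cni/net.d/" message above.
// The node stays NotReady until at least one network config appears here.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // directory named in the kubelet message
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Printf("cannot read %s: %v (network provider not started?)\n", confDir, err)
		os.Exit(1)
	}
	var confs []string
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json": // extensions libcni considers (assumed filter)
			confs = append(confs, e.Name())
		}
	}
	if len(confs) == 0 {
		fmt.Printf("NetworkReady=false: no CNI configuration file in %s\n", confDir)
		os.Exit(1)
	}
	fmt.Printf("NetworkReady=true: found %v\n", confs)
}

On this cluster the config is expected to appear once ovnkube-controller comes up and writes it into the host CNI conf dir (mounted as host-cni-netd in the ovnkube-node pod status later in this log); until then the node stays NotReady and unsandboxed pods cannot sync.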
Dec 05 01:11:44 crc kubenswrapper[4665]: I1205 01:11:44.893082 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 01:11:44 crc kubenswrapper[4665]: E1205 01:11:44.893183 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 01:11:44 crc kubenswrapper[4665]: I1205 01:11:44.893092 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 01:11:44 crc kubenswrapper[4665]: E1205 01:11:44.893240 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 01:11:44 crc kubenswrapper[4665]: I1205 01:11:44.894442 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 01:11:44 crc kubenswrapper[4665]: E1205 01:11:44.894537 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 01:11:44 crc kubenswrapper[4665]: I1205 01:11:44.908745 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:44Z is after 2025-08-24T17:21:41Z"
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:44 crc kubenswrapper[4665]: I1205 01:11:44.935605 4665 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e233784-fe06-4bef-a30e-29d8dca2f91d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9aafbd36430c36f8d23c7ea72a4b97080a503c7f79c113d1861ed80c2140c2e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944b0c438e2c4d4babd4e351233dd36f74a2e322de0895ac47454bfb37d137dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7xnrl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:44 crc kubenswrapper[4665]: I1205 01:11:44.939804 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:44 crc kubenswrapper[4665]: I1205 01:11:44.939836 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:44 crc kubenswrapper[4665]: I1205 01:11:44.939847 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:44 crc kubenswrapper[4665]: I1205 01:11:44.939862 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:44 crc kubenswrapper[4665]: I1205 01:11:44.939872 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:44Z","lastTransitionTime":"2025-12-05T01:11:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:44 crc kubenswrapper[4665]: I1205 01:11:44.952328 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220
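The condition object in the recurring "Node became not ready" entries is a v1.NodeCondition rendered as JSON. A stdlib-only mock that reproduces the logged payload; the struct here is illustrative, as the kubelet itself uses the k8s.io/api/core/v1 types:

// nodecondition.go - mirrors the fields visible in the setters.go log entries
// so the condition={...} payload can be reproduced and inspected offline.
package main

import (
	"encoding/json"
	"fmt"
	"time"
)

type nodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	// timestamp taken from the surrounding log entries
	now := time.Date(2025, 12, 5, 1, 11, 44, 0, time.UTC).Format(time.RFC3339)
	c := nodeCondition{
		Type:               "Ready",
		Status:             "False", // node stays NotReady while the runtime network is down
		LastHeartbeatTime:  now,
		LastTransitionTime: now,
		Reason:             "KubeletNotReady",
		Message: "container runtime network not ready: NetworkReady=false " +
			"reason:NetworkPluginNotReady message:Network plugin returns error: " +
			"no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?",
	}
	b, _ := json.Marshal(c)
	fmt.Println("condition=" + string(b))
}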
Dec 05 01:11:44 crc kubenswrapper[4665]: I1205 01:11:44.952328 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:44Z is after 2025-08-24T17:21:41Z"
Dec 05 01:11:44 crc kubenswrapper[4665]: I1205 01:11:44.964373 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:44 crc kubenswrapper[4665]: I1205 01:11:44.975888 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:44 crc kubenswrapper[4665]: I1205 01:11:44.992271 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://37654f6dba99ea31449aeebf96440888f375d6dfae26b4970a1393604b8970f6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:11:09Z\\\",\\\"message\\\":\\\"enshift-image-registry/image-registry\\\\\\\"}, Opts:services.LBOpts{Reject:false, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{}, Templates:services.TemplateMap{}, Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}, built lbs: []services.LB{services.LB{Name:\\\\\\\"Service_openshift-image-registry/image-registry_TCP_cluster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-image-registry/image-registry\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.5.93\\\\\\\", Port:5000, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1205 01:11:08.951199 6194 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1205 01:11:08.951202 6194 obj_retry.go:434] periodicallyRetryResources: Retry channel got triggered: retrying failed objects of type *v1.Pod\\\\nF1205 01:11:08.951251 6194 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:11:08Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:11:40Z\\\",\\\"message\\\":\\\"for removal\\\\nI1205 01:11:38.628645 6551 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 01:11:38.628651 6551 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 01:11:38.628668 6551 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 01:11:38.628675 6551 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 01:11:38.628645 6551 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 01:11:38.628808 6551 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 01:11:38.629067 6551 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 
01:11:38.629177 6551 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 01:11:38.629269 6551 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 01:11:38.629070 6551 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 01:11:38.629934 6551 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 01:11:38.629985 6551 factory.go:656] Stopping watch factory\\\\nI1205 01:11:38.629998 6551 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:11:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\
\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:44Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.008143 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.021636 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xhbdk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1e639aa-4bf7-4baa-a332-62dffec786d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xhbdk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.031473 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9919ba70-c8d1-4829-bc34-f3aa8266abe8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a93e520899a2846dc0cefe3c7202139908be046101bd7b7b57c4495aa70f3664\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714dad76d51c3355cc0a4de1749bf3e960d0eefd2bcb82cf264981941c1fc754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://714dad76d51c3355cc0a4de1749bf3e960d0eefd2bcb82cf264981941c1fc754\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.042166 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.042193 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.042201 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.042214 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.042226 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:45Z","lastTransitionTime":"2025-12-05T01:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.048667 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resourc
es\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c303953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Complet
ed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.059008 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"889d53ba-f6b3-4293-8565-01740293dfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ba79066d653d7bb787b2e25c4814d52630cb5b93d5a64706a7ed4438db92204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b526f85582e7a9dd8a7cd3ecbcf11caca2a83cd5b9a9e3f70cc3c7ee9680e7d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02e1517bd35ee9ac7c95a0aa67ccd5d85fbcb63c1cd17c3acd10a493649d4629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2504ed26a239c4f11d977aad9a4cc0ef5c9a7eefee20790a21dd013288a06a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c2504ed26a239c4f11d977aad9a4cc0ef5c9a7eefee20790a21dd013288a06a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.070515 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.080495 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.092660 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.108832 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.126383 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://064bac400c3e094a4e3864b57ff11e28c7c0b35c66a359a07995c9d42ba7c068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:11:30Z\\\",\\\"message\\\":\\\"2025-12-05T01:10:45+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2c5f21c9-68cf-4c1e-aa8c-b48fff93d364\\\\n2025-12-05T01:10:45+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2c5f21c9-68cf-4c1e-aa8c-b48fff93d364 to /host/opt/cni/bin/\\\\n2025-12-05T01:10:45Z [verbose] multus-daemon started\\\\n2025-12-05T01:10:45Z [verbose] Readiness Indicator file check\\\\n2025-12-05T01:11:30Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:11:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.144536 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.144574 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.144583 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.144598 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.144607 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:45Z","lastTransitionTime":"2025-12-05T01:11:45Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.148707 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\
\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\
\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T01:11:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.172157 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\
\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:45Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.247146 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.247181 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.247189 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.247204 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.247213 4665 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:45Z","lastTransitionTime":"2025-12-05T01:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.350246 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.350332 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.350356 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.350383 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.350403 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:45Z","lastTransitionTime":"2025-12-05T01:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.453199 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.453256 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.453274 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.453333 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.453352 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:45Z","lastTransitionTime":"2025-12-05T01:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.556912 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.556968 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.556985 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.557008 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.557024 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:45Z","lastTransitionTime":"2025-12-05T01:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.660414 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.660480 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.660506 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.660537 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.660554 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:45Z","lastTransitionTime":"2025-12-05T01:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.763494 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.763536 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.763551 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.763587 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.763602 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:45Z","lastTransitionTime":"2025-12-05T01:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.866373 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.866603 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.866689 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.866761 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.866821 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:45Z","lastTransitionTime":"2025-12-05T01:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.893224 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:45 crc kubenswrapper[4665]: E1205 01:11:45.893410 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.969407 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.969724 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.969822 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.969925 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:45 crc kubenswrapper[4665]: I1205 01:11:45.970004 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:45Z","lastTransitionTime":"2025-12-05T01:11:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
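Every repetition above, including the network-metrics-daemon-xhbdk sandbox failure, points at the same root cause: nothing has written a CNI network config into /etc/kubernetes/cni/net.d/ yet. A minimal Go sketch (an equivalent test against the directory named in the message, not the kubelet's actual readiness check) would be:

// cni_check.go: look for CNI network configs in the directory from the log.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	const cniDir = "/etc/kubernetes/cni/net.d" // path quoted in the kubelet message
	var found []string
	for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
		m, err := filepath.Glob(filepath.Join(cniDir, pat))
		if err != nil { // only possible with a malformed pattern
			panic(err)
		}
		found = append(found, m...)
	}
	if len(found) == 0 {
		fmt.Println("no CNI configuration file found; NetworkReady will stay false")
		os.Exit(1)
	}
	for _, f := range found {
		fmt.Println("CNI config present:", f)
	}
}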
Has your network provider started?"} Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.073100 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.073167 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.073190 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.073219 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.073244 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:46Z","lastTransitionTime":"2025-12-05T01:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.176372 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.176429 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.176448 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.176471 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.176488 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:46Z","lastTransitionTime":"2025-12-05T01:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.278600 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.278642 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.278650 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.278663 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.278671 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:46Z","lastTransitionTime":"2025-12-05T01:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.381034 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.381073 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.381081 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.381119 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.381130 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:46Z","lastTransitionTime":"2025-12-05T01:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.483665 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.483721 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.483739 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.483763 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.483779 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:46Z","lastTransitionTime":"2025-12-05T01:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.587163 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.587238 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.587262 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.587328 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.587359 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:46Z","lastTransitionTime":"2025-12-05T01:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.690228 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.690272 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.690287 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.690325 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.690336 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:46Z","lastTransitionTime":"2025-12-05T01:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.792826 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.792870 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.792882 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.792900 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.792916 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:46Z","lastTransitionTime":"2025-12-05T01:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.810749 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:11:46 crc kubenswrapper[4665]: E1205 01:11:46.810916 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:50.810894317 +0000 UTC m=+146.150286626 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.892511 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:46 crc kubenswrapper[4665]: E1205 01:11:46.892679 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.892990 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:46 crc kubenswrapper[4665]: E1205 01:11:46.893110 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.893622 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:46 crc kubenswrapper[4665]: E1205 01:11:46.893719 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
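The TearDown failure above ("driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers") means the driver's registration socket never appeared in the kubelet's plugin-registration directory. A sketch that lists that directory: /var/lib/kubelet/plugins_registry is the stock kubelet location, and the assumption that the registrar names its socket after the driver is illustrative, not read from this cluster:

// csi_registration_check.go: list kubelet plugin-registration sockets.
package main

import (
	"fmt"
	"os"
	"strings"
)

func main() {
	const regDir = "/var/lib/kubelet/plugins_registry" // stock kubelet location
	entries, err := os.ReadDir(regDir)
	if err != nil {
		fmt.Println("cannot read", regDir+":", err)
		os.Exit(1)
	}
	found := false
	for _, e := range entries {
		fmt.Println("registered:", e.Name())
		// Assumption: the registrar names its socket after the driver.
		if strings.Contains(e.Name(), "kubevirt.io.hostpath-provisioner") {
			found = true
		}
	}
	if !found {
		fmt.Println("kubevirt.io.hostpath-provisioner absent; unmounts will keep failing")
	}
}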
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.895368 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.895397 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.895412 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.895439 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.895455 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:46Z","lastTransitionTime":"2025-12-05T01:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.938383 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.938444 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.938480 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.938590 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:46 crc kubenswrapper[4665]: E1205 01:11:46.938619 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 01:11:46 crc kubenswrapper[4665]: E1205 01:11:46.938656 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 01:11:46 crc 
kubenswrapper[4665]: E1205 01:11:46.938672 4665 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:11:46 crc kubenswrapper[4665]: E1205 01:11:46.938684 4665 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 01:11:46 crc kubenswrapper[4665]: E1205 01:11:46.938620 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 01:11:46 crc kubenswrapper[4665]: E1205 01:11:46.938754 4665 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 01:11:46 crc kubenswrapper[4665]: E1205 01:11:46.938804 4665 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 01:11:46 crc kubenswrapper[4665]: E1205 01:11:46.938835 4665 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:11:46 crc kubenswrapper[4665]: E1205 01:11:46.938743 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 01:12:50.938718669 +0000 UTC m=+146.278110988 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:11:46 crc kubenswrapper[4665]: E1205 01:11:46.938898 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 01:12:50.938881512 +0000 UTC m=+146.278273821 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 01:11:46 crc kubenswrapper[4665]: E1205 01:11:46.938918 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2025-12-05 01:12:50.938908143 +0000 UTC m=+146.278300452 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 01:11:46 crc kubenswrapper[4665]: E1205 01:11:46.938934 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 01:12:50.938926763 +0000 UTC m=+146.278319072 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.997480 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.997525 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.997553 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.997572 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:46 crc kubenswrapper[4665]: I1205 01:11:46.997584 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:46Z","lastTransitionTime":"2025-12-05T01:11:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
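The volume operations above all fail for the same reason (the kubelet's object cache has not registered the referenced ConfigMaps and Secret) and each is parked with durationBeforeRetry 1m4s. 1m4s is 64s, which is what a 500ms delay doubled seven times yields; a sketch of that arithmetic, with the base delay and doubling factor being assumptions for illustration rather than values taken from kubelet source:

// backoff_sketch.go: reproduce the 1m4s figure from a doubling backoff.
package main

import (
	"fmt"
	"time"
)

func main() {
	delay := 500 * time.Millisecond // assumed base delay
	for failure := 1; failure <= 8; failure++ {
		fmt.Printf("failure %d: durationBeforeRetry %v\n", failure, delay)
		delay *= 2 // assumed doubling per consecutive failure
	}
	// failure 8 prints "1m4s", matching the waits logged above.
}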
Has your network provider started?"} Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.099673 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.099715 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.099727 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.099744 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.099757 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:47Z","lastTransitionTime":"2025-12-05T01:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.202814 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.202872 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.202889 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.202913 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.202930 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:47Z","lastTransitionTime":"2025-12-05T01:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.305520 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.305567 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.305624 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.305644 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.305657 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:47Z","lastTransitionTime":"2025-12-05T01:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.379224 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.379265 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.379277 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.379315 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.379328 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:47Z","lastTransitionTime":"2025-12-05T01:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:47 crc kubenswrapper[4665]: E1205 01:11:47.397051 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:47Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.400768 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.400798 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.400806 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.400818 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.400827 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:47Z","lastTransitionTime":"2025-12-05T01:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:47 crc kubenswrapper[4665]: E1205 01:11:47.413932 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:47Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.417764 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.417804 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.417813 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.417826 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.417835 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:47Z","lastTransitionTime":"2025-12-05T01:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:47 crc kubenswrapper[4665]: E1205 01:11:47.433761 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:47Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.437750 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.437797 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.437809 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.437825 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.438103 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:47Z","lastTransitionTime":"2025-12-05T01:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:47 crc kubenswrapper[4665]: E1205 01:11:47.453816 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:47Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.456246 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.456277 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.456286 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.456318 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.456330 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:47Z","lastTransitionTime":"2025-12-05T01:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:47 crc kubenswrapper[4665]: E1205 01:11:47.467418 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:47Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:47Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:47 crc kubenswrapper[4665]: E1205 01:11:47.467528 4665 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.468710 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.468786 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.468796 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.468810 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.468821 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:47Z","lastTransitionTime":"2025-12-05T01:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.571614 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.571645 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.571662 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.571675 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.571684 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:47Z","lastTransitionTime":"2025-12-05T01:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.674824 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.675187 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.675209 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.675236 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.675259 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:47Z","lastTransitionTime":"2025-12-05T01:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.778156 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.778228 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.778251 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.778281 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.778341 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:47Z","lastTransitionTime":"2025-12-05T01:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.881223 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.881279 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.881330 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.881355 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.881373 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:47Z","lastTransitionTime":"2025-12-05T01:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.892529 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:47 crc kubenswrapper[4665]: E1205 01:11:47.892659 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.984562 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.984607 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.984617 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.984633 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:47 crc kubenswrapper[4665]: I1205 01:11:47.984644 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:47Z","lastTransitionTime":"2025-12-05T01:11:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.088072 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.088143 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.088161 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.088182 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.088196 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:48Z","lastTransitionTime":"2025-12-05T01:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.190561 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.190628 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.190649 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.190674 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.190693 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:48Z","lastTransitionTime":"2025-12-05T01:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.292828 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.293131 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.293374 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.293568 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.293713 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:48Z","lastTransitionTime":"2025-12-05T01:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.396371 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.396723 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.396975 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.397197 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.397487 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:48Z","lastTransitionTime":"2025-12-05T01:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.499894 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.499961 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.499984 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.500012 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.500031 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:48Z","lastTransitionTime":"2025-12-05T01:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.602738 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.602785 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.602796 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.602814 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.602828 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:48Z","lastTransitionTime":"2025-12-05T01:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.705190 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.705238 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.705250 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.705271 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.705282 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:48Z","lastTransitionTime":"2025-12-05T01:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.807998 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.808031 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.808039 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.808051 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.808060 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:48Z","lastTransitionTime":"2025-12-05T01:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.893465 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.893501 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:48 crc kubenswrapper[4665]: E1205 01:11:48.893576 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.893465 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:48 crc kubenswrapper[4665]: E1205 01:11:48.893670 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:11:48 crc kubenswrapper[4665]: E1205 01:11:48.893743 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.909991 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.910070 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.910089 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.910113 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:48 crc kubenswrapper[4665]: I1205 01:11:48.910130 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:48Z","lastTransitionTime":"2025-12-05T01:11:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.012579 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.012645 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.012654 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.012668 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.012676 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:49Z","lastTransitionTime":"2025-12-05T01:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.115992 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.116086 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.116113 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.116146 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.116170 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:49Z","lastTransitionTime":"2025-12-05T01:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.223515 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.223573 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.223591 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.223616 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.223635 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:49Z","lastTransitionTime":"2025-12-05T01:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.326472 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.326554 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.326580 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.326613 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.326637 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:49Z","lastTransitionTime":"2025-12-05T01:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.430006 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.430119 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.430136 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.430160 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.430173 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:49Z","lastTransitionTime":"2025-12-05T01:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.532487 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.532782 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.532958 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.533108 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.533250 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:49Z","lastTransitionTime":"2025-12-05T01:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.635786 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.636405 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.636532 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.636626 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.636748 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:49Z","lastTransitionTime":"2025-12-05T01:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.739639 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.739688 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.739703 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.739727 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.739742 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:49Z","lastTransitionTime":"2025-12-05T01:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.842230 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.842271 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.842287 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.842329 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.842345 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:49Z","lastTransitionTime":"2025-12-05T01:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.892704 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:49 crc kubenswrapper[4665]: E1205 01:11:49.892865 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.944726 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.944797 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.944809 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.944828 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:49 crc kubenswrapper[4665]: I1205 01:11:49.944840 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:49Z","lastTransitionTime":"2025-12-05T01:11:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.047783 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.047852 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.047874 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.047901 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.047921 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:50Z","lastTransitionTime":"2025-12-05T01:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.150988 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.151097 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.151168 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.151205 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.151228 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:50Z","lastTransitionTime":"2025-12-05T01:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.254166 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.254220 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.254236 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.254259 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.254277 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:50Z","lastTransitionTime":"2025-12-05T01:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.356722 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.356771 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.356786 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.356808 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.356824 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:50Z","lastTransitionTime":"2025-12-05T01:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.460255 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.460361 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.460385 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.460414 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.460476 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:50Z","lastTransitionTime":"2025-12-05T01:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.563937 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.563999 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.564016 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.564040 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.564058 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:50Z","lastTransitionTime":"2025-12-05T01:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.666415 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.666457 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.666469 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.666489 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.666541 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:50Z","lastTransitionTime":"2025-12-05T01:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.769279 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.769346 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.769362 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.769382 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.769397 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:50Z","lastTransitionTime":"2025-12-05T01:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.871717 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.871808 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.871832 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.871863 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.871885 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:50Z","lastTransitionTime":"2025-12-05T01:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.892852 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.892908 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.892965 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:50 crc kubenswrapper[4665]: E1205 01:11:50.893104 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:11:50 crc kubenswrapper[4665]: E1205 01:11:50.893232 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:11:50 crc kubenswrapper[4665]: E1205 01:11:50.893433 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.975189 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.975344 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.975372 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.975403 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:50 crc kubenswrapper[4665]: I1205 01:11:50.975427 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:50Z","lastTransitionTime":"2025-12-05T01:11:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.077921 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.077948 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.077956 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.077968 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.078002 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:51Z","lastTransitionTime":"2025-12-05T01:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.180908 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.180993 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.181009 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.181030 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.181045 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:51Z","lastTransitionTime":"2025-12-05T01:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.283923 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.283965 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.283982 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.284003 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.284018 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:51Z","lastTransitionTime":"2025-12-05T01:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.386573 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.386642 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.386655 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.386671 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.386684 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:51Z","lastTransitionTime":"2025-12-05T01:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.489625 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.489669 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.489679 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.489696 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.489706 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:51Z","lastTransitionTime":"2025-12-05T01:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.591982 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.592024 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.592033 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.592067 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.592079 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:51Z","lastTransitionTime":"2025-12-05T01:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.694053 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.694091 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.694102 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.694124 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.694135 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:51Z","lastTransitionTime":"2025-12-05T01:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.796761 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.796806 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.796815 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.796835 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.796846 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:51Z","lastTransitionTime":"2025-12-05T01:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.892685 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:51 crc kubenswrapper[4665]: E1205 01:11:51.892826 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.899323 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.899359 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.899368 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.899382 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:51 crc kubenswrapper[4665]: I1205 01:11:51.899392 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:51Z","lastTransitionTime":"2025-12-05T01:11:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.002392 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.002452 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.002470 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.002494 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.002512 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:52Z","lastTransitionTime":"2025-12-05T01:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.105140 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.105187 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.105200 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.105216 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.105227 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:52Z","lastTransitionTime":"2025-12-05T01:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.208213 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.208353 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.208377 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.208402 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.208420 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:52Z","lastTransitionTime":"2025-12-05T01:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.311671 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.311736 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.311750 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.311767 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.311779 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:52Z","lastTransitionTime":"2025-12-05T01:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.414613 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.414688 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.414701 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.414715 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.414726 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:52Z","lastTransitionTime":"2025-12-05T01:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.517206 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.517250 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.517258 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.517274 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.517283 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:52Z","lastTransitionTime":"2025-12-05T01:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.619946 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.619983 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.619994 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.620010 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.620024 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:52Z","lastTransitionTime":"2025-12-05T01:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.722742 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.722876 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.722899 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.722926 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.722952 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:52Z","lastTransitionTime":"2025-12-05T01:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.825705 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.825744 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.825757 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.825776 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.825811 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:52Z","lastTransitionTime":"2025-12-05T01:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.892767 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.892818 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.892767 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:52 crc kubenswrapper[4665]: E1205 01:11:52.892891 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:11:52 crc kubenswrapper[4665]: E1205 01:11:52.892946 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:11:52 crc kubenswrapper[4665]: E1205 01:11:52.893030 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.927735 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.927768 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.927776 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.927791 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:52 crc kubenswrapper[4665]: I1205 01:11:52.927799 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:52Z","lastTransitionTime":"2025-12-05T01:11:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.030200 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.030279 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.030329 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.030354 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.030371 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:53Z","lastTransitionTime":"2025-12-05T01:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.132663 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.132735 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.132751 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.132769 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.132780 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:53Z","lastTransitionTime":"2025-12-05T01:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.234665 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.234694 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.234703 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.234716 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.234726 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:53Z","lastTransitionTime":"2025-12-05T01:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.336759 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.336814 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.336830 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.336857 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.336875 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:53Z","lastTransitionTime":"2025-12-05T01:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.439786 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.439843 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.439862 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.439888 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.439905 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:53Z","lastTransitionTime":"2025-12-05T01:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.542796 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.542859 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.542880 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.543116 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.543153 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:53Z","lastTransitionTime":"2025-12-05T01:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.646078 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.646135 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.646154 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.646191 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.646236 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:53Z","lastTransitionTime":"2025-12-05T01:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.748976 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.749026 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.749042 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.749066 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.749082 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:53Z","lastTransitionTime":"2025-12-05T01:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.851679 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.851734 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.851752 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.851773 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.851789 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:53Z","lastTransitionTime":"2025-12-05T01:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.892847 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:53 crc kubenswrapper[4665]: E1205 01:11:53.893066 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.894015 4665 scope.go:117] "RemoveContainer" containerID="56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e" Dec 05 01:11:53 crc kubenswrapper[4665]: E1205 01:11:53.894267 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2bmn9_openshift-ovn-kubernetes(76af84f2-4935-4e7f-8fc6-b51adcfeebc4)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.909577 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"889d53ba-f6b3-4293-8565-01740293dfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ba79066d653d7bb787b2e25c4814d52630cb5b93d5a64706a7ed4438db92204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b526f85582e7a9dd8a7cd3ecbcf11caca2a83cd5b9a9e3f70cc3c7ee9680e7d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca
001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02e1517bd35ee9ac7c95a0aa67ccd5d85fbcb63c1cd17c3acd10a493649d4629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2504ed26a239c4f11d977aad9a4cc0ef5c9a7eefee20790a21dd013288a06a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c2504ed26a239c4f11d977aad9a4cc0ef5c9a7eefee20790a21dd013288a06a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:53Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.920061 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:53Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.928990 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:53Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.938199 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:53Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.953411 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:53Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.954068 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.954101 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.954169 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.954185 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.954194 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:53Z","lastTransitionTime":"2025-12-05T01:11:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.967006 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://064bac400c3e094a4e3864b57ff11e28c7c0b35c66a359a07995c9d42ba7c068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:11:30Z\\\",\\\"message\\\":\\\"2025-12-05T01:10:45+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2c5f21c9-68cf-4c1e-aa8c-b48fff93d364\\\\n2025-12-05T01:10:45+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2c5f21c9-68cf-4c1e-aa8c-b48fff93d364 to /host/opt/cni/bin/\\\\n2025-12-05T01:10:45Z [verbose] multus-daemon started\\\\n2025-12-05T01:10:45Z [verbose] Readiness Indicator file check\\\\n2025-12-05T01:11:30Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:11:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:53Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.980024 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:53Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:53 crc kubenswrapper[4665]: I1205 01:11:53.994407 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:53Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.008868 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.023543 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.036394 4665 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e233784-fe06-4bef-a30e-29d8dca2f91d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9aafbd36430c36f8d23c7ea72a4b97080a503c7f79c113d1861ed80c2140c2e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944b0c438e2c4d4babd4e351233dd36f74a2e322de0895ac47454bfb37d137dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7xnrl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: 
x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:54Z is after 2025-08-24T17:21:41Z"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.048817 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:54Z is after 2025-08-24T17:21:41Z"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.056271 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.056319 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.056329 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.056342 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.056352 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:54Z","lastTransitionTime":"2025-12-05T01:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.061433 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:54Z is after 2025-08-24T17:21:41Z"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.073346 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:54Z is after 2025-08-24T17:21:41Z"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.090125 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:11:40Z\\\",\\\"message\\\":\\\"for removal\\\\nI1205 01:11:38.628645 6551 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 01:11:38.628651 6551 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 01:11:38.628668 6551 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 01:11:38.628675 6551 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 01:11:38.628645 6551 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 01:11:38.628808 6551 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 01:11:38.629067 6551 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 01:11:38.629177 6551 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 01:11:38.629269 6551 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 01:11:38.629070 6551 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 01:11:38.629934 6551 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 01:11:38.629985 6551 factory.go:656] Stopping watch factory\\\\nI1205 01:11:38.629998 6551 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:11:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2bmn9_openshift-ovn-kubernetes(76af84f2-4935-4e7f-8fc6-b51adcfeebc4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:54Z is after 2025-08-24T17:21:41Z"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.099107 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:54Z is after 2025-08-24T17:21:41Z"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.108363 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xhbdk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1e639aa-4bf7-4baa-a332-62dffec786d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xhbdk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:54Z is after 2025-08-24T17:21:41Z"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.117838 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9919ba70-c8d1-4829-bc34-f3aa8266abe8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a93e520899a2846dc0cefe3c7202139908be046101bd7b7b57c4495aa70f3664\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714dad76d51c3355cc0a4de1749bf3e960d0eefd2bcb82cf264981941c1fc754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://714dad76d51c3355cc0a4de1749bf3e960d0eefd2bcb82cf264981941c1fc754\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:54Z is after 2025-08-24T17:21:41Z"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.138780 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c303953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:54Z is after 2025-08-24T17:21:41Z"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.158851 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.158916 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.158936 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.158966 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.158990 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:54Z","lastTransitionTime":"2025-12-05T01:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.262403 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.262562 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.262584 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.262610 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.262628 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:54Z","lastTransitionTime":"2025-12-05T01:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.366338 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.366395 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.366411 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.366432 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.366447 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:54Z","lastTransitionTime":"2025-12-05T01:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.469436 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.469504 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.469521 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.469545 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.469576 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:54Z","lastTransitionTime":"2025-12-05T01:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.572331 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.572366 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.572375 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.572392 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.572402 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:54Z","lastTransitionTime":"2025-12-05T01:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.676288 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.676352 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.676364 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.676382 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.676396 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:54Z","lastTransitionTime":"2025-12-05T01:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.780498 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.780576 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.780596 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.780620 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.780637 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:54Z","lastTransitionTime":"2025-12-05T01:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.883615 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.883688 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.883712 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.883742 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.883766 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:54Z","lastTransitionTime":"2025-12-05T01:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.892994 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.893096 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 01:11:54 crc kubenswrapper[4665]: E1205 01:11:54.893223 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 01:11:54 crc kubenswrapper[4665]: E1205 01:11:54.893448 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.893575 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 01:11:54 crc kubenswrapper[4665]: E1205 01:11:54.893754 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.917032 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://064bac400c3e094a4e3864b57ff11e28c7c0b35c66a359a07995c9d42ba7c068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:11:30Z\\\",\\\"message\\\":\\\"2025-12-05T01:10:45+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2c5f21c9-68cf-4c1e-aa8c-b48fff93d364\\\\n2025-12-05T01:10:45+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2c5f21c9-68cf-4c1e-aa8c-b48fff93d364 to /host/opt/cni/bin/\\\\n2025-12-05T01:10:45Z [verbose] multus-daemon started\\\\n2025-12-05T01:10:45Z [verbose] Readiness Indicator file check\\\\n2025-12-05T01:11:30Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:11:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:54Z is after 2025-08-24T17:21:41Z"
Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.941109 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.961010 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.979032 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"889d53ba-f6b3-4293-8565-01740293dfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ba79066d653d7bb787b2e25c4814d52630cb5b93d5a64706a7ed4438db92204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b526f85582e7a9dd8a7cd3ecbcf11caca2a83cd5b9a9e3f70cc3c7ee9680e7d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02e1517bd35ee9ac7c95a0aa67ccd5d85fbcb63c1cd17c3acd10a493649d4629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2504ed26a239c4f11d977aad9a4cc0ef5c9a7eefee20790a21dd013288a06a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c2504ed26a239c4f11d977aad9a4cc0ef5c9a7eefee20790a21dd013288a06a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.990142 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.990221 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.990245 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.990277 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.990332 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:54Z","lastTransitionTime":"2025-12-05T01:11:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:54 crc kubenswrapper[4665]: I1205 01:11:54.999589 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:54Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.021738 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.037954 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.059380 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.075375 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.091230 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.093140 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.093173 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.093188 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.093206 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.093218 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:55Z","lastTransitionTime":"2025-12-05T01:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.107789 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.125857 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e233784-fe06-4bef-a30e-29d8dca2f91d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9aafbd36430c36f8d23c7ea72a4b97080a503c7f79c113d1861ed80c2140c2e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944b0c438e2c4d4babd4e351233dd36f74a2e322de0895ac47454bfb37d137dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:
57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7xnrl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.141148 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9919ba70-c8d1-4829-bc34-f3aa8266abe8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a93e520899a2846dc0cefe3c7202139908be046101bd7b7b57c4495aa70f3664\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714dad76d51c3355cc0a4de1749bf3e960d0eefd2bcb82cf264981941c1fc754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://714dad76d51c3355cc0a4de1749bf3e960d0eefd2bcb82cf264981941c1fc754\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.161351 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.174315 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.196449 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.196497 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.196513 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.196535 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.196550 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:55Z","lastTransitionTime":"2025-12-05T01:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.201810 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recu
rsiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d7732574532
65a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:11:40Z\\\",\\\"message\\\":\\\"for removal\\\\nI1205 01:11:38.628645 6551 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 01:11:38.628651 6551 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 01:11:38.628668 6551 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 01:11:38.628675 6551 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 01:11:38.628645 6551 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 01:11:38.628808 6551 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 01:11:38.629067 6551 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 01:11:38.629177 6551 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 01:11:38.629269 6551 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 01:11:38.629070 6551 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 01:11:38.629934 6551 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 01:11:38.629985 6551 factory.go:656] Stopping watch factory\\\\nI1205 01:11:38.629998 6551 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:11:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed 
container=ovnkube-controller pod=ovnkube-node-2bmn9_openshift-ovn-kubernetes(76af84f2-4935-4e7f-8fc6-b51adcfeebc4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.216538 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,
\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.235285 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xhbdk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1e639aa-4bf7-4baa-a332-62dffec786d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-xhbdk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.262717 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c3
03953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:55Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.298986 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.299049 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.299068 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.299093 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.299112 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:55Z","lastTransitionTime":"2025-12-05T01:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.403192 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.403229 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.403238 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.403251 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.403260 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:55Z","lastTransitionTime":"2025-12-05T01:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.506656 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.506719 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.506737 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.506763 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.506780 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:55Z","lastTransitionTime":"2025-12-05T01:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.609388 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.609444 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.609460 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.609484 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.609502 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:55Z","lastTransitionTime":"2025-12-05T01:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.711863 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.711941 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.711966 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.711995 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.712019 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:55Z","lastTransitionTime":"2025-12-05T01:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.814808 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.814852 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.814864 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.814879 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.814891 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:55Z","lastTransitionTime":"2025-12-05T01:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.892582 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk"
Dec 05 01:11:55 crc kubenswrapper[4665]: E1205 01:11:55.892772 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8"
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.918232 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.918291 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.918334 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.918362 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:55 crc kubenswrapper[4665]: I1205 01:11:55.918380 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:55Z","lastTransitionTime":"2025-12-05T01:11:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.021214 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.021259 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.021275 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.021318 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.021334 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:56Z","lastTransitionTime":"2025-12-05T01:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.123856 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.123923 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.123941 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.123976 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.123997 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:56Z","lastTransitionTime":"2025-12-05T01:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.226667 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.226716 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.226734 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.226757 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.226773 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:56Z","lastTransitionTime":"2025-12-05T01:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.328873 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.328938 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.328957 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.328984 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.329003 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:56Z","lastTransitionTime":"2025-12-05T01:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.431660 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.431761 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.431785 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.431810 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.431829 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:56Z","lastTransitionTime":"2025-12-05T01:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.534783 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.534850 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.534877 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.534912 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.534940 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:56Z","lastTransitionTime":"2025-12-05T01:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.638016 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.638068 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.638079 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.638094 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.638120 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:56Z","lastTransitionTime":"2025-12-05T01:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.740519 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.740552 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.740595 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.740616 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.740626 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:56Z","lastTransitionTime":"2025-12-05T01:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.842528 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.842595 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.842603 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.842615 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.842624 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:56Z","lastTransitionTime":"2025-12-05T01:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.893384 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.893426 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 01:11:56 crc kubenswrapper[4665]: E1205 01:11:56.893529 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.893389 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 01:11:56 crc kubenswrapper[4665]: E1205 01:11:56.893657 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 01:11:56 crc kubenswrapper[4665]: E1205 01:11:56.893731 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.944700 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.944768 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.944791 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.944815 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:56 crc kubenswrapper[4665]: I1205 01:11:56.944834 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:56Z","lastTransitionTime":"2025-12-05T01:11:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.048054 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.048120 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.048141 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.048163 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.048177 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:57Z","lastTransitionTime":"2025-12-05T01:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.150961 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.151049 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.151077 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.151111 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.151133 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:57Z","lastTransitionTime":"2025-12-05T01:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.254108 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.254166 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.254185 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.254208 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.254225 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:57Z","lastTransitionTime":"2025-12-05T01:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.357228 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.357351 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.357380 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.357411 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.357434 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:57Z","lastTransitionTime":"2025-12-05T01:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.460015 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.460076 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.460100 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.460129 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.460153 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:57Z","lastTransitionTime":"2025-12-05T01:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.563105 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.563137 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.563145 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.563157 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.563166 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:57Z","lastTransitionTime":"2025-12-05T01:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.610420 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.610501 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.610515 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.610535 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.610547 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:57Z","lastTransitionTime":"2025-12-05T01:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:57 crc kubenswrapper[4665]: E1205 01:11:57.623265 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.627333 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.627377 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.627391 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.627441 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.627455 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:57Z","lastTransitionTime":"2025-12-05T01:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:57 crc kubenswrapper[4665]: E1205 01:11:57.643678 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.647819 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.647853 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.647862 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.647874 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.647883 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:57Z","lastTransitionTime":"2025-12-05T01:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:57 crc kubenswrapper[4665]: E1205 01:11:57.660514 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.664462 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.664556 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.664578 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.664615 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.664633 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:57Z","lastTransitionTime":"2025-12-05T01:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:57 crc kubenswrapper[4665]: E1205 01:11:57.678329 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.682207 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.682262 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.682274 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.682321 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.682333 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:57Z","lastTransitionTime":"2025-12-05T01:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:57 crc kubenswrapper[4665]: E1205 01:11:57.693489 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"7800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"24148068Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"8\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"24608868Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:57Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"488e7d27-a26c-435c-9dd7-9f8a30d10fc0\\\",\\\"systemUUID\\\":\\\"58fe2495-9a84-4b13-8fa7-031f802ab624\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:11:57Z is after 2025-08-24T17:21:41Z" Dec 05 01:11:57 crc kubenswrapper[4665]: E1205 01:11:57.693667 4665 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.695005 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.695050 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.695093 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.695112 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.695127 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:57Z","lastTransitionTime":"2025-12-05T01:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.797344 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.797379 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.797390 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.797405 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.797415 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:57Z","lastTransitionTime":"2025-12-05T01:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.892689 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:57 crc kubenswrapper[4665]: E1205 01:11:57.892824 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.899035 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.899062 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.899070 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.899100 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:57 crc kubenswrapper[4665]: I1205 01:11:57.899109 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:57Z","lastTransitionTime":"2025-12-05T01:11:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.001938 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.002018 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.002036 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.002057 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.002071 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:58Z","lastTransitionTime":"2025-12-05T01:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.104359 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.104408 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.104419 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.104436 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.104448 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:58Z","lastTransitionTime":"2025-12-05T01:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.207228 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.207275 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.207318 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.207341 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.207356 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:58Z","lastTransitionTime":"2025-12-05T01:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.309566 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.309609 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.309692 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.309706 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.309714 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:58Z","lastTransitionTime":"2025-12-05T01:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.411708 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.411754 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.411767 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.411784 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.411796 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:58Z","lastTransitionTime":"2025-12-05T01:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.514693 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.515197 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.515433 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.515620 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.515792 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:58Z","lastTransitionTime":"2025-12-05T01:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.618923 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.619025 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.619039 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.619057 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.619081 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:58Z","lastTransitionTime":"2025-12-05T01:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.722208 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.722284 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.722362 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.722396 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.722419 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:58Z","lastTransitionTime":"2025-12-05T01:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.825550 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.825603 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.825620 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.825647 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.825665 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:58Z","lastTransitionTime":"2025-12-05T01:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.892654 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.892805 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:11:58 crc kubenswrapper[4665]: E1205 01:11:58.892833 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.892898 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:11:58 crc kubenswrapper[4665]: E1205 01:11:58.893004 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:11:58 crc kubenswrapper[4665]: E1205 01:11:58.893141 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.929103 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.929137 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.929146 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.929162 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:58 crc kubenswrapper[4665]: I1205 01:11:58.929170 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:58Z","lastTransitionTime":"2025-12-05T01:11:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.032347 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.032398 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.032412 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.032431 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.032445 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:59Z","lastTransitionTime":"2025-12-05T01:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.134674 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.134707 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.134720 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.134736 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.134746 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:59Z","lastTransitionTime":"2025-12-05T01:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.238105 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.238183 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.238202 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.238233 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.238252 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:59Z","lastTransitionTime":"2025-12-05T01:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.341150 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.341213 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.341225 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.341245 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.341256 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:59Z","lastTransitionTime":"2025-12-05T01:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.444149 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.444219 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.444240 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.444335 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.444360 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:59Z","lastTransitionTime":"2025-12-05T01:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.547368 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.547417 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.547428 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.547447 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.547458 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:59Z","lastTransitionTime":"2025-12-05T01:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.650165 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.650276 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.650328 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.650358 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.650376 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:59Z","lastTransitionTime":"2025-12-05T01:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.753746 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.753795 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.753808 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.753828 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.753842 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:59Z","lastTransitionTime":"2025-12-05T01:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.856711 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.856790 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.856834 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.856857 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.856873 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:59Z","lastTransitionTime":"2025-12-05T01:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.893332 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:11:59 crc kubenswrapper[4665]: E1205 01:11:59.893461 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.959576 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.959653 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.959688 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.959705 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:11:59 crc kubenswrapper[4665]: I1205 01:11:59.959729 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:11:59Z","lastTransitionTime":"2025-12-05T01:11:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.062672 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.062768 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.062792 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.062816 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.062832 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:00Z","lastTransitionTime":"2025-12-05T01:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.166070 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.166136 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.166154 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.166180 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.166198 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:00Z","lastTransitionTime":"2025-12-05T01:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.268579 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.268653 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.268677 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.268710 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.268736 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:00Z","lastTransitionTime":"2025-12-05T01:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.371706 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.371805 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.371823 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.372380 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.372469 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:00Z","lastTransitionTime":"2025-12-05T01:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.475687 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.475755 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.475779 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.475810 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.475836 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:00Z","lastTransitionTime":"2025-12-05T01:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.578803 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.578869 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.578887 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.578910 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.578928 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:00Z","lastTransitionTime":"2025-12-05T01:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.681279 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.681374 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.681399 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.681421 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.681435 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:00Z","lastTransitionTime":"2025-12-05T01:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.784148 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.784211 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.784233 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.784258 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.784276 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:00Z","lastTransitionTime":"2025-12-05T01:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.887241 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.887371 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.887380 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.887415 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.887428 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:00Z","lastTransitionTime":"2025-12-05T01:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.893432 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:12:00 crc kubenswrapper[4665]: E1205 01:12:00.893531 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.893427 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:12:00 crc kubenswrapper[4665]: E1205 01:12:00.893743 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.893754 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:12:00 crc kubenswrapper[4665]: E1205 01:12:00.893884 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.989804 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.990131 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.990276 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.990468 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:00 crc kubenswrapper[4665]: I1205 01:12:00.990611 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:00Z","lastTransitionTime":"2025-12-05T01:12:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 05 01:12:01 crc kubenswrapper[4665]: I1205 01:12:01.892760 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk"
Dec 05 01:12:01 crc kubenswrapper[4665]: E1205 01:12:01.893145 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8"
Dec 05 01:12:01 crc kubenswrapper[4665]: I1205 01:12:01.893211 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs\") pod \"network-metrics-daemon-xhbdk\" (UID: \"e1e639aa-4bf7-4baa-a332-62dffec786d8\") " pod="openshift-multus/network-metrics-daemon-xhbdk"
Dec 05 01:12:01 crc kubenswrapper[4665]: E1205 01:12:01.893396 4665 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 01:12:01 crc kubenswrapper[4665]: E1205 01:12:01.893591 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs podName:e1e639aa-4bf7-4baa-a332-62dffec786d8 nodeName:}" failed. No retries permitted until 2025-12-05 01:13:05.893564632 +0000 UTC m=+161.232956941 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs") pod "network-metrics-daemon-xhbdk" (UID: "e1e639aa-4bf7-4baa-a332-62dffec786d8") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 01:12:02 crc kubenswrapper[4665]: I1205 01:12:02.892654 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 01:12:02 crc kubenswrapper[4665]: I1205 01:12:02.892719 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 01:12:02 crc kubenswrapper[4665]: E1205 01:12:02.892801 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 01:12:02 crc kubenswrapper[4665]: I1205 01:12:02.892654 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 01:12:02 crc kubenswrapper[4665]: E1205 01:12:02.892931 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 01:12:02 crc kubenswrapper[4665]: E1205 01:12:02.892996 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 01:12:03 crc kubenswrapper[4665]: I1205 01:12:03.893358 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk"
Dec 05 01:12:03 crc kubenswrapper[4665]: E1205 01:12:03.893502 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8"
Dec 05 01:12:04 crc kubenswrapper[4665]: I1205 01:12:04.893553 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 01:12:04 crc kubenswrapper[4665]: I1205 01:12:04.893629 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 01:12:04 crc kubenswrapper[4665]: I1205 01:12:04.893632 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 01:12:04 crc kubenswrapper[4665]: E1205 01:12:04.893736 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 01:12:04 crc kubenswrapper[4665]: E1205 01:12:04.893850 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 01:12:04 crc kubenswrapper[4665]: E1205 01:12:04.894181 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 01:12:04 crc kubenswrapper[4665]: I1205 01:12:04.894494 4665 scope.go:117] "RemoveContainer" containerID="56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e"
Dec 05 01:12:04 crc kubenswrapper[4665]: E1205 01:12:04.894700 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2bmn9_openshift-ovn-kubernetes(76af84f2-4935-4e7f-8fc6-b51adcfeebc4)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4"
Dec 05 01:12:04 crc kubenswrapper[4665]: I1205 01:12:04.909333 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-j22m9" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"be34b4a6-0156-4e21-bae6-12af18583b0d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:32Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://064bac400c3e094a4e3864b57ff11e28c7c0b35c66a359a07995c9d42ba7c068\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:11:30Z\\\",\\\"message\\\":\\\"2025-12-05T01:10:45+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_2c5f21c9-68cf-4c1e-aa8c-b48fff93d364\\\\n2025-12-05T01:10:45+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_2c5f21c9-68cf-4c1e-aa8c-b48fff93d364 to /host/opt/cni/bin/\\\\n2025-12-05T01:10:45Z [verbose] multus-daemon started\\\\n2025-12-05T01:10:45Z [verbose] Readiness Indicator file check\\\\n2025-12-05T01:11:30Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:11:31Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9km6c\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-j22m9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:12:04Z is after 2025-08-24T17:21:41Z" Dec 05 01:12:04 crc kubenswrapper[4665]: I1205 01:12:04.925815 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-jl867" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3391189b-95c4-4746-8aea-1d3be0b4ae1a\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://591fdc257cfce6224aef82ac4ae207962453b16879b978ba19748b46ecd44a2a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3c5b6d1332c956c3bd1c87e77f4293b9cf3d2bb0877c7ff78f5dd97a6d42085f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0297254f9091827e2a05655e6811d956325a96b02cd217a64d3f67713f8d5c86\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://310f3517ad8a37a90546ec41a3b28623fb04296fc50f12f423c68ceccac711e6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fc2d296afa2c5284d8f1f6340dd125b3a99f47872cb445be7658205c732bea2e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:47Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b35d8911e91e747703155991b5523a5aa1bae137de661c206a9fb8faeb10159f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:48Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7d7e2319233eecf00416c7a57e94477950f7de9b242c1073aa612e11786684fe\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:49Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:49Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-j2qs5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-jl867\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:12:04Z is after 2025-08-24T17:21:41Z" Dec 05 01:12:04 crc kubenswrapper[4665]: I1205 01:12:04.948131 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
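Every status patch in these entries dies on the same TLS failure: the network-node-identity webhook at 127.0.0.1:9743 serves a certificate whose notAfter (2025-08-24T17:21:41Z) is months behind the node clock. A small sketch that reproduces the check from the client side with Python's stdlib (host and port are copied from the log; this is not how kubelet itself dials the webhook):

    import socket
    import ssl

    HOST, PORT = "127.0.0.1", 9743   # webhook endpoint quoted in the entries above

    ctx = ssl.create_default_context()
    ctx.check_hostname = False       # the log shows an IP endpoint, not a hostname
    try:
        with socket.create_connection((HOST, PORT), timeout=10) as sock:
            with ctx.wrap_socket(sock) as tls:
                print("handshake OK; notAfter:", tls.getpeercert()["notAfter"])
    except ssl.SSLCertVerificationError as exc:
        # An expired serving cert lands here with verify_message
        # "certificate has expired", matching the x509 error above.
        print("verification failed:", exc.verify_message)
    except OSError as exc:
        print("connect failed:", exc)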
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"ing back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": net/http: TLS handshake timeout\\\\nI1205 01:10:37.264942 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1205 01:10:37.265843 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-2442532982/tls.crt::/tmp/serving-cert-2442532982/tls.key\\\\\\\"\\\\nI1205 01:10:42.562391 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI1205 01:10:42.564721 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI1205 01:10:42.564743 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI1205 01:10:42.564763 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI1205 01:10:42.564779 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 01:10:42.569902 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 01:10:42.569923 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569928 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 01:10:42.569932 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 01:10:42.569935 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 01:10:42.569938 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 01:10:42.569941 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 01:10:42.570090 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nF1205 01:10:42.571550 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:12:04Z is after 2025-08-24T17:21:41Z" Dec 05 01:12:04 crc kubenswrapper[4665]: I1205 01:12:04.965777 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
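The patch bodies in these entries are ordinary strategic-merge-patch JSON, quote-escaped by the journal, and the $setElementOrder/conditions key is how the patch pins the ordering of the conditions list. To read one, strip a level of escaping and pretty-print it; a sketch under the assumption that one escaping level separates the journal text from the JSON (the uid is copied from the kube-apiserver-crc entry above, and the fragment is a shortened, hypothetical sample):

    import json

    # Shortened fragment in the escaped form seen above (hypothetical sample).
    raw = ('{\\"metadata\\":{\\"uid\\":\\"31e34d2b-42ec-4356-bd31-56ae869e58b0\\"},'
           '\\"status\\":{\\"phase\\":\\"Running\\"}}')

    patch = json.loads(raw.replace('\\"', '"'))   # undo one escaping level
    print(json.dumps(patch, indent=2))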
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"889d53ba-f6b3-4293-8565-01740293dfba\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:11:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ba79066d653d7bb787b2e25c4814d52630cb5b93d5a64706a7ed4438db92204\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b526f85582e7a9dd8a7cd3ecbcf11caca2a83cd5b9a9e3f70cc3c7ee9680e7d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://02e1517bd35ee9ac7c95a0aa67ccd5d85fbcb63c1cd17c3acd10a493649d4629\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c2504ed26a239c4f11d977aad9a4cc0ef5c9a7eefee20790a21dd013288a06a\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c2504ed26a239c4f11d977aad9a4cc0ef5c9a7eefee20790a21dd013288a06a\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:12:04Z is after 2025-08-24T17:21:41Z" Dec 05 01:12:04 crc kubenswrapper[4665]: I1205 01:12:04.983991 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:12:04Z is after 2025-08-24T17:21:41Z" Dec 05 01:12:04 crc kubenswrapper[4665]: I1205 01:12:04.997652 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:12:04Z is after 2025-08-24T17:21:41Z" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.001137 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.001171 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.001183 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.001199 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.001210 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:05Z","lastTransitionTime":"2025-12-05T01:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.013939 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e1700749307928d51519530aa8c08cbe646216723e57330bcb20becc9c0dda3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://04aca738a9d110041fcd3e3ef074e7079c5958a0b54cab22fbcb2c0b7f3eec46\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:12:05Z is after 2025-08-24T17:21:41Z" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.029012 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:42Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:12:05Z is after 2025-08-24T17:21:41Z" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.045004 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"010a5924-aa5f-49f0-9ab6-a9b69a131d69\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://2fd61b6e4fc093fc595b2bc5ff189b9fe651a03da9f7ea034ba63f20da57803e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://589118d12d58d9c27b8472304234322666a534049cf9ae392d7bd62af648a74f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://57676394e89275c14d635f99df590707eda8c07f5d2d6b7eea48a24e9214ad51\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:25Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:12:05Z is after 2025-08-24T17:21:41Z" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.056980 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c8aedd922e2d27b6c6e148464d9391c41d95bf39310988b09b4f50d0987feeaf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2025-12-05T01:12:05Z is after 2025-08-24T17:21:41Z" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.067914 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e561e3d26ea90c6be624a19fe5c99f69cb2a39eb72e09cdcf8e5c96936a52efe\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rblk6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-rgbtc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:12:05Z is after 2025-08-24T17:21:41Z" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.081953 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2e233784-fe06-4bef-a30e-29d8dca2f91d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:58Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9aafbd36430c36f8d23c7ea72a4b97080a503c7f79c113d1861ed80c2140c2e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://944b0c438e2c4d4babd4e351233dd36f74a2e322de0895ac47454bfb37d137dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:57Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8tpjf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\
\"2025-12-05T01:10:57Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-7xnrl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:12:05Z is after 2025-08-24T17:21:41Z" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.094071 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9919ba70-c8d1-4829-bc34-f3aa8266abe8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a93e520899a2846dc0cefe3c7202139908be046101bd7b7b57c4495aa70f3664\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://714dad76d51c3355cc0a4de1749bf3e960d0eefd2bcb82cf264981941c1fc754\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://714dad76d51c3355cc0a4de1749bf3e960d0eefd2bcb82cf264981941c1fc754\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:12:05Z is after 2025-08-24T17:21:41Z" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.102895 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.102922 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.102929 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.102941 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.102950 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:05Z","lastTransitionTime":"2025-12-05T01:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.109352 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://31292f748a595e673777cec51d1e5fa461f59dc2fa882234ac44b44674340b53\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:12:05Z is after 2025-08-24T17:21:41Z" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.122432 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-cps4h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"91c7cec2-2aab-4cab-a055-c7994cb11d17\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6d2eab3d96d7e3f3a9866c46577930d19b0914c503e1cc1109cae388e2efde2b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-mtwqt\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:43Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-cps4h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:12:05Z is after 2025-08-24T17:21:41Z" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.149746 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:44Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T01:11:40Z\\\",\\\"message\\\":\\\"for removal\\\\nI1205 01:11:38.628645 6551 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1205 01:11:38.628651 6551 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1205 01:11:38.628668 6551 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1205 01:11:38.628675 6551 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 01:11:38.628645 6551 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 01:11:38.628808 6551 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1205 01:11:38.629067 6551 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 01:11:38.629177 6551 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 01:11:38.629269 6551 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 01:11:38.629070 6551 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 01:11:38.629934 6551 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1205 01:11:38.629985 6551 factory.go:656] Stopping watch factory\\\\nI1205 01:11:38.629998 6551 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T01:11:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-2bmn9_openshift-ovn-kubernetes(76af84f2-4935-4e7f-8fc6-b51adcfeebc4)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bklrv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:44Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-2bmn9\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:12:05Z is after 2025-08-24T17:21:41Z" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.162426 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-mmcgt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a4ff6abc-ff7f-425a-a60e-b8923b7294a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:52Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0a9b48619fd2becc6de07b20babfe9ab03f2ebadce13ac81b88a41738dd58c03\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:51Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-kshh4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:50Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-mmcgt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:12:05Z is after 2025-08-24T17:21:41Z" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.174270 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-xhbdk" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e1e639aa-4bf7-4baa-a332-62dffec786d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:57Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rjznw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:57Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-xhbdk\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:12:05Z is after 2025-08-24T17:21:41Z" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.191079 4665 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"237afd26-53a3-405c-94f2-9bea533dd959\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T01:10:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4d22cbf57da1230d0784f8b098c356498e8ba5c8545e253778b9e67a676abcca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://012915edd2ae310a9ac17f8394d7e039e630c7c89d2794ef916d5c70856fff89\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a696a55c4e5aa9305211fc71334587a3a39224101353d35924e181820c2f33fc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb48e509a210c6d542a95c4fbec75a380c293c303953c8d140ff913049da0fea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5d4f3adb596ad1e9346f183a23eb11ba017d0995f3d866f6f1f31a70fc836d31\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T01:10:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0e43521f7038a6b53193875c03fc4dc7bed96ef21db937e333a23c1fc20f255e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e
9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e1fbe7132697d5dab5e73be400e4cb324b8a48f63989299054fd98665f330c84\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:26Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8cdac84a0c7aac6020852a75c7bd1800cbabaab7e56bbd1a5269bc8cac98543e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T01:10:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T01:10:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T01:10:24Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T01:12:05Z is after 2025-08-24T17:21:41Z" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.204217 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.204251 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.204261 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.204278 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.204289 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:05Z","lastTransitionTime":"2025-12-05T01:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.306473 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.306524 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.306535 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.306553 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.306566 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:05Z","lastTransitionTime":"2025-12-05T01:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.409618 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.409664 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.409678 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.409696 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.409710 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:05Z","lastTransitionTime":"2025-12-05T01:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.512555 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.512596 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.512607 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.512623 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.512633 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:05Z","lastTransitionTime":"2025-12-05T01:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.615482 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.615526 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.615541 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.615557 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.615568 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:05Z","lastTransitionTime":"2025-12-05T01:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.718766 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.718830 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.718840 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.718858 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.718869 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:05Z","lastTransitionTime":"2025-12-05T01:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.821457 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.821529 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.821542 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.821559 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.821572 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:05Z","lastTransitionTime":"2025-12-05T01:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.892738 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:12:05 crc kubenswrapper[4665]: E1205 01:12:05.893065 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.925205 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.925253 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.925264 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.925282 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:05 crc kubenswrapper[4665]: I1205 01:12:05.925312 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:05Z","lastTransitionTime":"2025-12-05T01:12:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.027885 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.027930 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.027945 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.027961 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.028226 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:06Z","lastTransitionTime":"2025-12-05T01:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.130010 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.130040 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.130047 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.130061 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.130069 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:06Z","lastTransitionTime":"2025-12-05T01:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.231693 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.231728 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.231740 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.231758 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.231768 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:06Z","lastTransitionTime":"2025-12-05T01:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.333925 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.333963 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.333971 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.333983 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.333993 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:06Z","lastTransitionTime":"2025-12-05T01:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.436012 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.436045 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.436053 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.436066 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.436075 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:06Z","lastTransitionTime":"2025-12-05T01:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.538505 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.538559 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.538583 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.538601 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.538612 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:06Z","lastTransitionTime":"2025-12-05T01:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.640497 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.640540 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.640556 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.640578 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.640594 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:06Z","lastTransitionTime":"2025-12-05T01:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.743526 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.743595 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.743612 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.743635 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.743653 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:06Z","lastTransitionTime":"2025-12-05T01:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.845772 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.845841 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.845860 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.845889 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.845909 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:06Z","lastTransitionTime":"2025-12-05T01:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.893603 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.893691 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:12:06 crc kubenswrapper[4665]: E1205 01:12:06.893870 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.893933 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:12:06 crc kubenswrapper[4665]: E1205 01:12:06.894017 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:12:06 crc kubenswrapper[4665]: E1205 01:12:06.894157 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.948146 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.948188 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.948200 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.948213 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:06 crc kubenswrapper[4665]: I1205 01:12:06.948222 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:06Z","lastTransitionTime":"2025-12-05T01:12:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.050411 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.050458 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.050468 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.050489 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.050505 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:07Z","lastTransitionTime":"2025-12-05T01:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.153009 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.153064 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.153075 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.153092 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.153105 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:07Z","lastTransitionTime":"2025-12-05T01:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.255237 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.255283 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.255388 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.255412 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.255426 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:07Z","lastTransitionTime":"2025-12-05T01:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.358029 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.358065 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.358073 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.358085 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.358094 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:07Z","lastTransitionTime":"2025-12-05T01:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.460318 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.460391 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.460400 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.460412 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.460423 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:07Z","lastTransitionTime":"2025-12-05T01:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.563633 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.563727 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.563746 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.563769 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.563786 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:07Z","lastTransitionTime":"2025-12-05T01:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.667009 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.667056 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.667068 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.667086 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.667098 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:07Z","lastTransitionTime":"2025-12-05T01:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.770350 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.770444 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.770465 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.770488 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.770505 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:07Z","lastTransitionTime":"2025-12-05T01:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.825158 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.825205 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.825216 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.825233 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.825248 4665 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T01:12:07Z","lastTransitionTime":"2025-12-05T01:12:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.892945 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk"
Dec 05 01:12:07 crc kubenswrapper[4665]: E1205 01:12:07.893128 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.901609 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-kvz2m"]
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.902069 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kvz2m"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.903831 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.904031 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.905078 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.905407 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.942480 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-j22m9" podStartSLOduration=84.9424568 podStartE2EDuration="1m24.9424568s" podCreationTimestamp="2025-12-05 01:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:07.942381888 +0000 UTC m=+103.281774207" watchObservedRunningTime="2025-12-05 01:12:07.9424568 +0000 UTC m=+103.281849109"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.966823 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8c7a8d1a-6f13-4d65-882f-271565536c05-service-ca\") pod \"cluster-version-operator-5c965bbfc6-kvz2m\" (UID: \"8c7a8d1a-6f13-4d65-882f-271565536c05\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kvz2m"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.966874 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c7a8d1a-6f13-4d65-882f-271565536c05-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-kvz2m\" (UID: \"8c7a8d1a-6f13-4d65-882f-271565536c05\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kvz2m"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.966903 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/8c7a8d1a-6f13-4d65-882f-271565536c05-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-kvz2m\" (UID: \"8c7a8d1a-6f13-4d65-882f-271565536c05\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kvz2m"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.966930 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/8c7a8d1a-6f13-4d65-882f-271565536c05-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-kvz2m\" (UID: \"8c7a8d1a-6f13-4d65-882f-271565536c05\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kvz2m"
Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.966948 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c7a8d1a-6f13-4d65-882f-271565536c05-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-kvz2m\" (UID: \"8c7a8d1a-6f13-4d65-882f-271565536c05\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kvz2m"
\"8c7a8d1a-6f13-4d65-882f-271565536c05\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kvz2m" Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.984735 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-jl867" podStartSLOduration=84.984715994 podStartE2EDuration="1m24.984715994s" podCreationTimestamp="2025-12-05 01:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:07.968016139 +0000 UTC m=+103.307408468" watchObservedRunningTime="2025-12-05 01:12:07.984715994 +0000 UTC m=+103.324108303" Dec 05 01:12:07 crc kubenswrapper[4665]: I1205 01:12:07.985100 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=85.985089333 podStartE2EDuration="1m25.985089333s" podCreationTimestamp="2025-12-05 01:10:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:07.985030882 +0000 UTC m=+103.324423181" watchObservedRunningTime="2025-12-05 01:12:07.985089333 +0000 UTC m=+103.324481642" Dec 05 01:12:08 crc kubenswrapper[4665]: I1205 01:12:08.006768 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=53.006741319 podStartE2EDuration="53.006741319s" podCreationTimestamp="2025-12-05 01:11:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:08.006738679 +0000 UTC m=+103.346130978" watchObservedRunningTime="2025-12-05 01:12:08.006741319 +0000 UTC m=+103.346133638" Dec 05 01:12:08 crc kubenswrapper[4665]: I1205 01:12:08.068398 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c7a8d1a-6f13-4d65-882f-271565536c05-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-kvz2m\" (UID: \"8c7a8d1a-6f13-4d65-882f-271565536c05\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kvz2m" Dec 05 01:12:08 crc kubenswrapper[4665]: I1205 01:12:08.068445 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8c7a8d1a-6f13-4d65-882f-271565536c05-service-ca\") pod \"cluster-version-operator-5c965bbfc6-kvz2m\" (UID: \"8c7a8d1a-6f13-4d65-882f-271565536c05\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kvz2m" Dec 05 01:12:08 crc kubenswrapper[4665]: I1205 01:12:08.068494 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/8c7a8d1a-6f13-4d65-882f-271565536c05-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-kvz2m\" (UID: \"8c7a8d1a-6f13-4d65-882f-271565536c05\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kvz2m" Dec 05 01:12:08 crc kubenswrapper[4665]: I1205 01:12:08.068518 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/8c7a8d1a-6f13-4d65-882f-271565536c05-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-kvz2m\" (UID: \"8c7a8d1a-6f13-4d65-882f-271565536c05\") " 
pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kvz2m" Dec 05 01:12:08 crc kubenswrapper[4665]: I1205 01:12:08.068558 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c7a8d1a-6f13-4d65-882f-271565536c05-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-kvz2m\" (UID: \"8c7a8d1a-6f13-4d65-882f-271565536c05\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kvz2m" Dec 05 01:12:08 crc kubenswrapper[4665]: I1205 01:12:08.068589 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/8c7a8d1a-6f13-4d65-882f-271565536c05-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-kvz2m\" (UID: \"8c7a8d1a-6f13-4d65-882f-271565536c05\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kvz2m" Dec 05 01:12:08 crc kubenswrapper[4665]: I1205 01:12:08.068618 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/8c7a8d1a-6f13-4d65-882f-271565536c05-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-kvz2m\" (UID: \"8c7a8d1a-6f13-4d65-882f-271565536c05\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kvz2m" Dec 05 01:12:08 crc kubenswrapper[4665]: I1205 01:12:08.069313 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/8c7a8d1a-6f13-4d65-882f-271565536c05-service-ca\") pod \"cluster-version-operator-5c965bbfc6-kvz2m\" (UID: \"8c7a8d1a-6f13-4d65-882f-271565536c05\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kvz2m" Dec 05 01:12:08 crc kubenswrapper[4665]: I1205 01:12:08.076894 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c7a8d1a-6f13-4d65-882f-271565536c05-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-kvz2m\" (UID: \"8c7a8d1a-6f13-4d65-882f-271565536c05\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kvz2m" Dec 05 01:12:08 crc kubenswrapper[4665]: I1205 01:12:08.114942 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c7a8d1a-6f13-4d65-882f-271565536c05-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-kvz2m\" (UID: \"8c7a8d1a-6f13-4d65-882f-271565536c05\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kvz2m" Dec 05 01:12:08 crc kubenswrapper[4665]: I1205 01:12:08.129743 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=78.129716161 podStartE2EDuration="1m18.129716161s" podCreationTimestamp="2025-12-05 01:10:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:08.128252236 +0000 UTC m=+103.467644535" watchObservedRunningTime="2025-12-05 01:12:08.129716161 +0000 UTC m=+103.469108460" Dec 05 01:12:08 crc kubenswrapper[4665]: I1205 01:12:08.155986 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podStartSLOduration=85.155958707 podStartE2EDuration="1m25.155958707s" podCreationTimestamp="2025-12-05 01:10:43 +0000 UTC" 
Dec 05 01:12:08 crc kubenswrapper[4665]: I1205 01:12:08.167084 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-7xnrl" podStartSLOduration=84.167058996 podStartE2EDuration="1m24.167058996s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:08.166708128 +0000 UTC m=+103.506100437" watchObservedRunningTime="2025-12-05 01:12:08.167058996 +0000 UTC m=+103.506451295"
Dec 05 01:12:08 crc kubenswrapper[4665]: I1205 01:12:08.189681 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=38.189658885 podStartE2EDuration="38.189658885s" podCreationTimestamp="2025-12-05 01:11:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:08.187265887 +0000 UTC m=+103.526658186" watchObservedRunningTime="2025-12-05 01:12:08.189658885 +0000 UTC m=+103.529051184"
Dec 05 01:12:08 crc kubenswrapper[4665]: I1205 01:12:08.209370 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-cps4h" podStartSLOduration=85.209351382 podStartE2EDuration="1m25.209351382s" podCreationTimestamp="2025-12-05 01:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:08.209033904 +0000 UTC m=+103.548426203" watchObservedRunningTime="2025-12-05 01:12:08.209351382 +0000 UTC m=+103.548743681"
Dec 05 01:12:08 crc kubenswrapper[4665]: I1205 01:12:08.217242 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kvz2m"
Dec 05 01:12:08 crc kubenswrapper[4665]: I1205 01:12:08.247349 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-mmcgt" podStartSLOduration=84.247331213 podStartE2EDuration="1m24.247331213s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:08.243805187 +0000 UTC m=+103.583197486" watchObservedRunningTime="2025-12-05 01:12:08.247331213 +0000 UTC m=+103.586723512"
Dec 05 01:12:08 crc kubenswrapper[4665]: I1205 01:12:08.265002 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=82.264986131 podStartE2EDuration="1m22.264986131s" podCreationTimestamp="2025-12-05 01:10:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:08.264707144 +0000 UTC m=+103.604099443" watchObservedRunningTime="2025-12-05 01:12:08.264986131 +0000 UTC m=+103.604378430"
Dec 05 01:12:08 crc kubenswrapper[4665]: I1205 01:12:08.442177 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kvz2m" event={"ID":"8c7a8d1a-6f13-4d65-882f-271565536c05","Type":"ContainerStarted","Data":"6586ed51a70aebe147033f9d74c28bfc5ccc66499ac8dde2b6e5e300e00d7b07"}
Dec 05 01:12:08 crc kubenswrapper[4665]: I1205 01:12:08.442236 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kvz2m" event={"ID":"8c7a8d1a-6f13-4d65-882f-271565536c05","Type":"ContainerStarted","Data":"7d31ffc30b8913caf4d0f9d201a3d642928afafff1235a74ba8dd7ef9dbe73d4"}
Dec 05 01:12:08 crc kubenswrapper[4665]: I1205 01:12:08.459822 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-kvz2m" podStartSLOduration=85.459804026 podStartE2EDuration="1m25.459804026s" podCreationTimestamp="2025-12-05 01:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:08.459097109 +0000 UTC m=+103.798489408" watchObservedRunningTime="2025-12-05 01:12:08.459804026 +0000 UTC m=+103.799196335"
Dec 05 01:12:08 crc kubenswrapper[4665]: I1205 01:12:08.892664 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 01:12:08 crc kubenswrapper[4665]: E1205 01:12:08.892874 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 01:12:08 crc kubenswrapper[4665]: I1205 01:12:08.892693 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 01:12:08 crc kubenswrapper[4665]: E1205 01:12:08.892998 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 01:12:08 crc kubenswrapper[4665]: I1205 01:12:08.892675 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 01:12:08 crc kubenswrapper[4665]: E1205 01:12:08.893090 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 01:12:09 crc kubenswrapper[4665]: I1205 01:12:09.892565 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk"
Dec 05 01:12:09 crc kubenswrapper[4665]: E1205 01:12:09.892775 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8"
Dec 05 01:12:10 crc kubenswrapper[4665]: I1205 01:12:10.892589 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 01:12:10 crc kubenswrapper[4665]: I1205 01:12:10.892627 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 01:12:10 crc kubenswrapper[4665]: I1205 01:12:10.892779 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 01:12:10 crc kubenswrapper[4665]: E1205 01:12:10.892911 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 01:12:10 crc kubenswrapper[4665]: E1205 01:12:10.893016 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:12:10 crc kubenswrapper[4665]: E1205 01:12:10.893178 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:12:11 crc kubenswrapper[4665]: I1205 01:12:11.892397 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:12:11 crc kubenswrapper[4665]: E1205 01:12:11.892984 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:12:12 crc kubenswrapper[4665]: I1205 01:12:12.892812 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:12:12 crc kubenswrapper[4665]: I1205 01:12:12.892942 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:12:12 crc kubenswrapper[4665]: I1205 01:12:12.893059 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:12:12 crc kubenswrapper[4665]: E1205 01:12:12.893052 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:12:12 crc kubenswrapper[4665]: E1205 01:12:12.893234 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:12:12 crc kubenswrapper[4665]: E1205 01:12:12.893338 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:12:13 crc kubenswrapper[4665]: I1205 01:12:13.892954 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:12:13 crc kubenswrapper[4665]: E1205 01:12:13.893101 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:12:14 crc kubenswrapper[4665]: I1205 01:12:14.893282 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:12:14 crc kubenswrapper[4665]: E1205 01:12:14.894607 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:12:14 crc kubenswrapper[4665]: I1205 01:12:14.894652 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:12:14 crc kubenswrapper[4665]: I1205 01:12:14.894703 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:12:14 crc kubenswrapper[4665]: E1205 01:12:14.894840 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:12:14 crc kubenswrapper[4665]: E1205 01:12:14.895017 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:12:15 crc kubenswrapper[4665]: I1205 01:12:15.892512 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:12:15 crc kubenswrapper[4665]: E1205 01:12:15.893110 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:12:16 crc kubenswrapper[4665]: I1205 01:12:16.892885 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:12:16 crc kubenswrapper[4665]: I1205 01:12:16.892976 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:12:16 crc kubenswrapper[4665]: E1205 01:12:16.893056 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:12:16 crc kubenswrapper[4665]: I1205 01:12:16.893106 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:12:16 crc kubenswrapper[4665]: E1205 01:12:16.893281 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:12:16 crc kubenswrapper[4665]: E1205 01:12:16.893464 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:12:17 crc kubenswrapper[4665]: I1205 01:12:17.473365 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j22m9_be34b4a6-0156-4e21-bae6-12af18583b0d/kube-multus/1.log" Dec 05 01:12:17 crc kubenswrapper[4665]: I1205 01:12:17.474188 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j22m9_be34b4a6-0156-4e21-bae6-12af18583b0d/kube-multus/0.log" Dec 05 01:12:17 crc kubenswrapper[4665]: I1205 01:12:17.474262 4665 generic.go:334] "Generic (PLEG): container finished" podID="be34b4a6-0156-4e21-bae6-12af18583b0d" containerID="064bac400c3e094a4e3864b57ff11e28c7c0b35c66a359a07995c9d42ba7c068" exitCode=1 Dec 05 01:12:17 crc kubenswrapper[4665]: I1205 01:12:17.474348 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j22m9" event={"ID":"be34b4a6-0156-4e21-bae6-12af18583b0d","Type":"ContainerDied","Data":"064bac400c3e094a4e3864b57ff11e28c7c0b35c66a359a07995c9d42ba7c068"} Dec 05 01:12:17 crc kubenswrapper[4665]: I1205 01:12:17.474434 4665 scope.go:117] "RemoveContainer" containerID="84f556941c425721350d7d7bb185423238295f3705c9bb63ab5cc6de04ace954" Dec 05 01:12:17 crc kubenswrapper[4665]: I1205 01:12:17.475434 4665 scope.go:117] "RemoveContainer" containerID="064bac400c3e094a4e3864b57ff11e28c7c0b35c66a359a07995c9d42ba7c068" Dec 05 01:12:17 crc kubenswrapper[4665]: E1205 01:12:17.475858 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-j22m9_openshift-multus(be34b4a6-0156-4e21-bae6-12af18583b0d)\"" pod="openshift-multus/multus-j22m9" podUID="be34b4a6-0156-4e21-bae6-12af18583b0d" Dec 05 01:12:17 crc kubenswrapper[4665]: I1205 01:12:17.893370 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:12:17 crc kubenswrapper[4665]: E1205 01:12:17.893576 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:12:18 crc kubenswrapper[4665]: I1205 01:12:18.480690 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j22m9_be34b4a6-0156-4e21-bae6-12af18583b0d/kube-multus/1.log" Dec 05 01:12:18 crc kubenswrapper[4665]: I1205 01:12:18.893389 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:12:18 crc kubenswrapper[4665]: I1205 01:12:18.893493 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:12:18 crc kubenswrapper[4665]: I1205 01:12:18.893615 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:12:18 crc kubenswrapper[4665]: E1205 01:12:18.893608 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:12:18 crc kubenswrapper[4665]: E1205 01:12:18.893755 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:12:18 crc kubenswrapper[4665]: E1205 01:12:18.893875 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:12:19 crc kubenswrapper[4665]: I1205 01:12:19.893466 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:12:19 crc kubenswrapper[4665]: E1205 01:12:19.893648 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:12:19 crc kubenswrapper[4665]: I1205 01:12:19.894208 4665 scope.go:117] "RemoveContainer" containerID="56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e" Dec 05 01:12:19 crc kubenswrapper[4665]: E1205 01:12:19.894381 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-2bmn9_openshift-ovn-kubernetes(76af84f2-4935-4e7f-8fc6-b51adcfeebc4)\"" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" Dec 05 01:12:20 crc kubenswrapper[4665]: I1205 01:12:20.892710 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:12:20 crc kubenswrapper[4665]: I1205 01:12:20.892782 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:12:20 crc kubenswrapper[4665]: E1205 01:12:20.892901 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:12:20 crc kubenswrapper[4665]: I1205 01:12:20.892986 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:12:20 crc kubenswrapper[4665]: E1205 01:12:20.893187 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:12:20 crc kubenswrapper[4665]: E1205 01:12:20.893464 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:12:21 crc kubenswrapper[4665]: I1205 01:12:21.893401 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:12:21 crc kubenswrapper[4665]: E1205 01:12:21.893512 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:12:22 crc kubenswrapper[4665]: I1205 01:12:22.893228 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:12:22 crc kubenswrapper[4665]: I1205 01:12:22.893249 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:12:22 crc kubenswrapper[4665]: E1205 01:12:22.893390 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:12:22 crc kubenswrapper[4665]: E1205 01:12:22.893542 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:12:22 crc kubenswrapper[4665]: I1205 01:12:22.893635 4665 util.go:30] "No sandbox for pod can be found. 
Dec 05 01:12:22 crc kubenswrapper[4665]: E1205 01:12:22.893708 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 01:12:23 crc kubenswrapper[4665]: I1205 01:12:23.892821 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk"
Dec 05 01:12:23 crc kubenswrapper[4665]: E1205 01:12:23.893031 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8"
Dec 05 01:12:24 crc kubenswrapper[4665]: I1205 01:12:24.893480 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 01:12:24 crc kubenswrapper[4665]: I1205 01:12:24.893486 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 01:12:24 crc kubenswrapper[4665]: E1205 01:12:24.894497 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 01:12:24 crc kubenswrapper[4665]: I1205 01:12:24.894555 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 01:12:24 crc kubenswrapper[4665]: E1205 01:12:24.894685 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 01:12:24 crc kubenswrapper[4665]: E1205 01:12:24.894850 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 01:12:24 crc kubenswrapper[4665]: E1205 01:12:24.900169 4665 kubelet_node_status.go:497] "Node not becoming ready in time after startup"
Dec 05 01:12:25 crc kubenswrapper[4665]: E1205 01:12:25.014700 4665 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Dec 05 01:12:25 crc kubenswrapper[4665]: I1205 01:12:25.892690 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk"
Dec 05 01:12:25 crc kubenswrapper[4665]: E1205 01:12:25.892888 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8"
Dec 05 01:12:26 crc kubenswrapper[4665]: I1205 01:12:26.893550 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 01:12:26 crc kubenswrapper[4665]: I1205 01:12:26.893634 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 01:12:26 crc kubenswrapper[4665]: I1205 01:12:26.894451 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 01:12:26 crc kubenswrapper[4665]: E1205 01:12:26.894553 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 01:12:26 crc kubenswrapper[4665]: E1205 01:12:26.894731 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 01:12:26 crc kubenswrapper[4665]: E1205 01:12:26.894841 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 01:12:27 crc kubenswrapper[4665]: I1205 01:12:27.893128 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk"
Dec 05 01:12:27 crc kubenswrapper[4665]: E1205 01:12:27.893378 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8"
Dec 05 01:12:28 crc kubenswrapper[4665]: I1205 01:12:28.892483 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 01:12:28 crc kubenswrapper[4665]: I1205 01:12:28.892483 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 01:12:28 crc kubenswrapper[4665]: E1205 01:12:28.893594 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 01:12:28 crc kubenswrapper[4665]: I1205 01:12:28.892515 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 01:12:28 crc kubenswrapper[4665]: E1205 01:12:28.893515 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 01:12:28 crc kubenswrapper[4665]: E1205 01:12:28.893793 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 01:12:29 crc kubenswrapper[4665]: I1205 01:12:29.893353 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk"
Dec 05 01:12:29 crc kubenswrapper[4665]: E1205 01:12:29.894111 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8"
Dec 05 01:12:30 crc kubenswrapper[4665]: E1205 01:12:30.016169 4665 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
Has your network provider started?" Dec 05 01:12:30 crc kubenswrapper[4665]: I1205 01:12:30.892920 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:12:30 crc kubenswrapper[4665]: E1205 01:12:30.893117 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:12:30 crc kubenswrapper[4665]: I1205 01:12:30.893363 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:12:30 crc kubenswrapper[4665]: E1205 01:12:30.893612 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:12:30 crc kubenswrapper[4665]: I1205 01:12:30.894359 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:12:30 crc kubenswrapper[4665]: E1205 01:12:30.894679 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:12:31 crc kubenswrapper[4665]: I1205 01:12:31.892695 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:12:31 crc kubenswrapper[4665]: E1205 01:12:31.892847 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:12:32 crc kubenswrapper[4665]: I1205 01:12:32.893146 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:12:32 crc kubenswrapper[4665]: I1205 01:12:32.893492 4665 scope.go:117] "RemoveContainer" containerID="064bac400c3e094a4e3864b57ff11e28c7c0b35c66a359a07995c9d42ba7c068" Dec 05 01:12:32 crc kubenswrapper[4665]: E1205 01:12:32.893506 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:12:32 crc kubenswrapper[4665]: I1205 01:12:32.893560 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:12:32 crc kubenswrapper[4665]: E1205 01:12:32.893656 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:12:32 crc kubenswrapper[4665]: I1205 01:12:32.893523 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:12:32 crc kubenswrapper[4665]: E1205 01:12:32.893752 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:12:33 crc kubenswrapper[4665]: I1205 01:12:33.892922 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:12:33 crc kubenswrapper[4665]: E1205 01:12:33.893115 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:12:33 crc kubenswrapper[4665]: I1205 01:12:33.894483 4665 scope.go:117] "RemoveContainer" containerID="56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e" Dec 05 01:12:34 crc kubenswrapper[4665]: I1205 01:12:34.574823 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j22m9_be34b4a6-0156-4e21-bae6-12af18583b0d/kube-multus/1.log" Dec 05 01:12:34 crc kubenswrapper[4665]: I1205 01:12:34.574886 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j22m9" event={"ID":"be34b4a6-0156-4e21-bae6-12af18583b0d","Type":"ContainerStarted","Data":"95adcd9946fd2ca659a80fe447f9f72ba2ec042197f5e19940a5717bb7a4a837"} Dec 05 01:12:34 crc kubenswrapper[4665]: I1205 01:12:34.577145 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bmn9_76af84f2-4935-4e7f-8fc6-b51adcfeebc4/ovnkube-controller/3.log" Dec 05 01:12:34 crc kubenswrapper[4665]: I1205 01:12:34.579269 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerStarted","Data":"0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8"} Dec 05 01:12:34 crc kubenswrapper[4665]: I1205 01:12:34.579904 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:12:34 crc kubenswrapper[4665]: I1205 01:12:34.622790 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" podStartSLOduration=111.622775867 podStartE2EDuration="1m51.622775867s" podCreationTimestamp="2025-12-05 01:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:34.621416564 +0000 UTC m=+129.960808873" watchObservedRunningTime="2025-12-05 01:12:34.622775867 +0000 UTC m=+129.962168166" Dec 05 01:12:34 crc kubenswrapper[4665]: I1205 01:12:34.892967 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:12:34 crc kubenswrapper[4665]: I1205 01:12:34.893048 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:12:34 crc kubenswrapper[4665]: I1205 01:12:34.893060 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:12:34 crc kubenswrapper[4665]: E1205 01:12:34.894755 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:12:34 crc kubenswrapper[4665]: E1205 01:12:34.894869 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:12:34 crc kubenswrapper[4665]: E1205 01:12:34.894980 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:12:34 crc kubenswrapper[4665]: I1205 01:12:34.933861 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-xhbdk"] Dec 05 01:12:34 crc kubenswrapper[4665]: I1205 01:12:34.933997 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:12:34 crc kubenswrapper[4665]: E1205 01:12:34.934102 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:12:35 crc kubenswrapper[4665]: E1205 01:12:35.016785 4665 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 01:12:36 crc kubenswrapper[4665]: I1205 01:12:36.892981 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:12:36 crc kubenswrapper[4665]: E1205 01:12:36.893132 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:12:36 crc kubenswrapper[4665]: I1205 01:12:36.893337 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:12:36 crc kubenswrapper[4665]: E1205 01:12:36.893382 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:12:36 crc kubenswrapper[4665]: I1205 01:12:36.893390 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:12:36 crc kubenswrapper[4665]: I1205 01:12:36.893388 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:12:36 crc kubenswrapper[4665]: E1205 01:12:36.893553 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:12:36 crc kubenswrapper[4665]: E1205 01:12:36.893596 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:12:38 crc kubenswrapper[4665]: I1205 01:12:38.893521 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:12:38 crc kubenswrapper[4665]: I1205 01:12:38.893586 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:12:38 crc kubenswrapper[4665]: I1205 01:12:38.893613 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:12:38 crc kubenswrapper[4665]: I1205 01:12:38.893693 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:12:38 crc kubenswrapper[4665]: E1205 01:12:38.893747 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 01:12:38 crc kubenswrapper[4665]: E1205 01:12:38.893865 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 01:12:38 crc kubenswrapper[4665]: E1205 01:12:38.894037 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-xhbdk" podUID="e1e639aa-4bf7-4baa-a332-62dffec786d8" Dec 05 01:12:38 crc kubenswrapper[4665]: E1205 01:12:38.894190 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 01:12:40 crc kubenswrapper[4665]: I1205 01:12:40.893113 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:12:40 crc kubenswrapper[4665]: I1205 01:12:40.893166 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:12:40 crc kubenswrapper[4665]: I1205 01:12:40.893214 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk" Dec 05 01:12:40 crc kubenswrapper[4665]: I1205 01:12:40.893125 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:12:40 crc kubenswrapper[4665]: I1205 01:12:40.895215 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 05 01:12:40 crc kubenswrapper[4665]: I1205 01:12:40.895867 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 05 01:12:40 crc kubenswrapper[4665]: I1205 01:12:40.896027 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 05 01:12:40 crc kubenswrapper[4665]: I1205 01:12:40.896081 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 05 01:12:40 crc kubenswrapper[4665]: I1205 01:12:40.896438 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 05 01:12:40 crc kubenswrapper[4665]: I1205 01:12:40.898198 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 05 01:12:44 crc kubenswrapper[4665]: I1205 01:12:44.922472 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:12:44 crc kubenswrapper[4665]: I1205 01:12:44.922824 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:12:48 crc kubenswrapper[4665]: I1205 01:12:48.948569 4665 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 05 01:12:48 crc kubenswrapper[4665]: I1205 01:12:48.999707 4665 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-controller-manager/controller-manager-879f6c89f-7twnf"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.000436 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-4m9lk"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.000682 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.001322 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-njkt2"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.001349 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-4m9lk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.001768 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-njkt2" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.007831 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.010230 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-vl66w"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.011030 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-vl66w" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.014441 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-86r58"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.015097 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86r58" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.015893 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.017059 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.017483 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.017804 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-fqjcd"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.018049 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-fqjcd" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.022550 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.028432 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.028632 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.028735 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.031510 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c541bcd-d60a-4e73-8bcd-2d502eebbb9a-trusted-ca\") pod \"console-operator-58897d9998-4m9lk\" (UID: \"9c541bcd-d60a-4e73-8bcd-2d502eebbb9a\") " pod="openshift-console-operator/console-operator-58897d9998-4m9lk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.031562 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4d7c33e5-a79c-49f8-9d11-17fbe1879911-etcd-serving-ca\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.031583 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c541bcd-d60a-4e73-8bcd-2d502eebbb9a-config\") pod \"console-operator-58897d9998-4m9lk\" (UID: \"9c541bcd-d60a-4e73-8bcd-2d502eebbb9a\") " pod="openshift-console-operator/console-operator-58897d9998-4m9lk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.031601 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/4d7c33e5-a79c-49f8-9d11-17fbe1879911-node-pullsecrets\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.031616 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xjcmx\" (UniqueName: \"kubernetes.io/projected/f9f525e3-8aea-4e56-99ca-6a06cdafa897-kube-api-access-xjcmx\") pod \"controller-manager-879f6c89f-7twnf\" (UID: \"f9f525e3-8aea-4e56-99ca-6a06cdafa897\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.031641 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4d7c33e5-a79c-49f8-9d11-17fbe1879911-etcd-client\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.031664 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/f9f525e3-8aea-4e56-99ca-6a06cdafa897-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7twnf\" (UID: \"f9f525e3-8aea-4e56-99ca-6a06cdafa897\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.031682 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f9f525e3-8aea-4e56-99ca-6a06cdafa897-serving-cert\") pod \"controller-manager-879f6c89f-7twnf\" (UID: \"f9f525e3-8aea-4e56-99ca-6a06cdafa897\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.031706 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/4d7c33e5-a79c-49f8-9d11-17fbe1879911-audit\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.031724 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4d7c33e5-a79c-49f8-9d11-17fbe1879911-audit-dir\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.031743 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4d7c33e5-a79c-49f8-9d11-17fbe1879911-trusted-ca-bundle\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.031762 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/fc878503-97d1-4f69-a607-1ccae9ca303a-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-njkt2\" (UID: \"fc878503-97d1-4f69-a607-1ccae9ca303a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-njkt2" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.031781 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9c541bcd-d60a-4e73-8bcd-2d502eebbb9a-serving-cert\") pod \"console-operator-58897d9998-4m9lk\" (UID: \"9c541bcd-d60a-4e73-8bcd-2d502eebbb9a\") " pod="openshift-console-operator/console-operator-58897d9998-4m9lk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.031800 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgvkd\" (UniqueName: \"kubernetes.io/projected/fc878503-97d1-4f69-a607-1ccae9ca303a-kube-api-access-pgvkd\") pod \"cluster-samples-operator-665b6dd947-njkt2\" (UID: \"fc878503-97d1-4f69-a607-1ccae9ca303a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-njkt2" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.031817 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f9f525e3-8aea-4e56-99ca-6a06cdafa897-client-ca\") pod 
\"controller-manager-879f6c89f-7twnf\" (UID: \"f9f525e3-8aea-4e56-99ca-6a06cdafa897\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.031832 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d7c33e5-a79c-49f8-9d11-17fbe1879911-config\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.031851 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4d7c33e5-a79c-49f8-9d11-17fbe1879911-encryption-config\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.031891 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4d7c33e5-a79c-49f8-9d11-17fbe1879911-serving-cert\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.031909 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9f525e3-8aea-4e56-99ca-6a06cdafa897-config\") pod \"controller-manager-879f6c89f-7twnf\" (UID: \"f9f525e3-8aea-4e56-99ca-6a06cdafa897\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.031927 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2kltf\" (UniqueName: \"kubernetes.io/projected/4d7c33e5-a79c-49f8-9d11-17fbe1879911-kube-api-access-2kltf\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.031946 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/4d7c33e5-a79c-49f8-9d11-17fbe1879911-image-import-ca\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.031962 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twd8d\" (UniqueName: \"kubernetes.io/projected/9c541bcd-d60a-4e73-8bcd-2d502eebbb9a-kube-api-access-twd8d\") pod \"console-operator-58897d9998-4m9lk\" (UID: \"9c541bcd-d60a-4e73-8bcd-2d502eebbb9a\") " pod="openshift-console-operator/console-operator-58897d9998-4m9lk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.033365 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.035427 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.037012 4665 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.037495 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.037805 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.040122 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.041901 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.042047 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.042194 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.042339 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.042459 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.042593 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.042697 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 05 01:12:49 crc kubenswrapper[4665]: W1205 01:12:49.043145 4665 reflector.go:561] object-"openshift-console-operator"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-console-operator": no relationship found between node 'crc' and this object Dec 05 01:12:49 crc kubenswrapper[4665]: E1205 01:12:49.043175 4665 reflector.go:158] "Unhandled Error" err="object-\"openshift-console-operator\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-console-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.043223 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.043352 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.046809 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.047141 4665 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.048448 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.048676 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.049267 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.049589 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.049727 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.049934 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.050393 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.050595 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.053540 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bl9r"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.075653 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.076067 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.076237 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.077273 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.077319 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.077532 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.077644 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.077767 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.077948 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.078039 4665 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.077959 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.078432 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.078550 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.078599 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.078722 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.078770 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.079035 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.079641 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.080041 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.080076 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.080097 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.085209 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-xv889"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.085396 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bl9r" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.085463 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-hkzlk"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.085670 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-sjmw5"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.085943 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-xv889" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.086028 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-sjmw5" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.086288 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/downloads-7954f5f757-hkzlk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.086645 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.087189 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.087915 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.089434 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.090436 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.090555 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-42qzk"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.091064 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-45lxk"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.091413 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-45lxk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.091718 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-42qzk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.097439 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.097649 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.097714 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-q7hzk"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.097789 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.097992 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.098158 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-rgvgv"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.098176 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.098850 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-rgvgv" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.099180 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-q7hzk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.100627 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.100791 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.100905 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-njkt2"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.101043 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.105180 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.105414 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.105701 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.105850 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.107191 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.107565 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.107643 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.107722 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.107842 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.107886 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.107972 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.108065 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.108250 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.108887 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.108922 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 
01:12:49.109475 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.111138 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-4m9lk"]
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.114697 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.114886 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.115062 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.116062 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.117601 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.117865 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.118020 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.124339 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.126818 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.127042 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.127194 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.130685 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.131129 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.131344 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.131588 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.132435 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4d7c33e5-a79c-49f8-9d11-17fbe1879911-etcd-serving-ca\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.132458 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c541bcd-d60a-4e73-8bcd-2d502eebbb9a-trusted-ca\") pod \"console-operator-58897d9998-4m9lk\" (UID: \"9c541bcd-d60a-4e73-8bcd-2d502eebbb9a\") " pod="openshift-console-operator/console-operator-58897d9998-4m9lk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.132482 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/948ea842-e99b-4bb1-88df-b58ad8c75e31-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-7bl9r\" (UID: \"948ea842-e99b-4bb1-88df-b58ad8c75e31\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bl9r"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.132498 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b88c79b6-4760-4509-bee0-06de439c6ac2-console-config\") pod \"console-f9d7485db-xv889\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") " pod="openshift-console/console-f9d7485db-xv889"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.132514 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b88c79b6-4760-4509-bee0-06de439c6ac2-trusted-ca-bundle\") pod \"console-f9d7485db-xv889\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") " pod="openshift-console/console-f9d7485db-xv889"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.132531 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a2c26ae-4f48-465f-8808-6bb1a443bd89-config\") pod \"authentication-operator-69f744f599-fqjcd\" (UID: \"2a2c26ae-4f48-465f-8808-6bb1a443bd89\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fqjcd"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.132547 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfa776fb-8b8d-4cd1-941a-57e2672afdeb-config\") pod \"machine-api-operator-5694c8668f-42qzk\" (UID: \"cfa776fb-8b8d-4cd1-941a-57e2672afdeb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-42qzk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.132562 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/044a062b-0f88-42e6-9d3a-7a74dfa51ca5-serving-cert\") pod \"etcd-operator-b45778765-q7hzk\" (UID: \"044a062b-0f88-42e6-9d3a-7a74dfa51ca5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q7hzk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.132576 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6x7b9\" (UniqueName: \"kubernetes.io/projected/044a062b-0f88-42e6-9d3a-7a74dfa51ca5-kube-api-access-6x7b9\") pod \"etcd-operator-b45778765-q7hzk\" (UID: \"044a062b-0f88-42e6-9d3a-7a74dfa51ca5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q7hzk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.132592 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/948ea842-e99b-4bb1-88df-b58ad8c75e31-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-7bl9r\" (UID: \"948ea842-e99b-4bb1-88df-b58ad8c75e31\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bl9r"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.132613 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/5e25d774-afc2-4e37-9121-79f761e9e8d9-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-45lxk\" (UID: \"5e25d774-afc2-4e37-9121-79f761e9e8d9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-45lxk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.132630 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwdtb\" (UniqueName: \"kubernetes.io/projected/cfa776fb-8b8d-4cd1-941a-57e2672afdeb-kube-api-access-xwdtb\") pod \"machine-api-operator-5694c8668f-42qzk\" (UID: \"cfa776fb-8b8d-4cd1-941a-57e2672afdeb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-42qzk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.132660 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/648c3d09-edc5-43d1-acad-e3a36aa4c0d4-machine-approver-tls\") pod \"machine-approver-56656f9798-86r58\" (UID: \"648c3d09-edc5-43d1-acad-e3a36aa4c0d4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86r58"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.132676 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1cbef097-04e8-4837-9fbb-f37d6b9dafb2-serving-cert\") pod \"apiserver-7bbb656c7d-d4sxm\" (UID: \"1cbef097-04e8-4837-9fbb-f37d6b9dafb2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.132690 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2a2c26ae-4f48-465f-8808-6bb1a443bd89-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-fqjcd\" (UID: \"2a2c26ae-4f48-465f-8808-6bb1a443bd89\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fqjcd"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.132854 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c541bcd-d60a-4e73-8bcd-2d502eebbb9a-config\") pod \"console-operator-58897d9998-4m9lk\" (UID: \"9c541bcd-d60a-4e73-8bcd-2d502eebbb9a\") " pod="openshift-console-operator/console-operator-58897d9998-4m9lk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.132871 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1cbef097-04e8-4837-9fbb-f37d6b9dafb2-encryption-config\") pod \"apiserver-7bbb656c7d-d4sxm\" (UID: \"1cbef097-04e8-4837-9fbb-f37d6b9dafb2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.134941 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1cbef097-04e8-4837-9fbb-f37d6b9dafb2-audit-policies\") pod \"apiserver-7bbb656c7d-d4sxm\" (UID: \"1cbef097-04e8-4837-9fbb-f37d6b9dafb2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.135122 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b88c79b6-4760-4509-bee0-06de439c6ac2-oauth-serving-cert\") pod \"console-f9d7485db-xv889\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") " pod="openshift-console/console-f9d7485db-xv889"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.135149 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34ce612a-9777-4777-b6af-e98a53b3fb57-config\") pod \"route-controller-manager-6576b87f9c-t24dv\" (UID: \"34ce612a-9777-4777-b6af-e98a53b3fb57\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.135176 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/4d7c33e5-a79c-49f8-9d11-17fbe1879911-node-pullsecrets\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.135334 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xjcmx\" (UniqueName: \"kubernetes.io/projected/f9f525e3-8aea-4e56-99ca-6a06cdafa897-kube-api-access-xjcmx\") pod \"controller-manager-879f6c89f-7twnf\" (UID: \"f9f525e3-8aea-4e56-99ca-6a06cdafa897\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.135360 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8p8sn\" (UniqueName: \"kubernetes.io/projected/648c3d09-edc5-43d1-acad-e3a36aa4c0d4-kube-api-access-8p8sn\") pod \"machine-approver-56656f9798-86r58\" (UID: \"648c3d09-edc5-43d1-acad-e3a36aa4c0d4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86r58"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.135517 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4d7c33e5-a79c-49f8-9d11-17fbe1879911-etcd-client\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.135545 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8j6mr\" (UniqueName: \"kubernetes.io/projected/948ea842-e99b-4bb1-88df-b58ad8c75e31-kube-api-access-8j6mr\") pod \"openshift-controller-manager-operator-756b6f6bc6-7bl9r\" (UID: \"948ea842-e99b-4bb1-88df-b58ad8c75e31\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bl9r"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.135573 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b62hk\" (UniqueName: \"kubernetes.io/projected/30a44c68-0827-46b3-ba7c-5aabf5cb34b1-kube-api-access-b62hk\") pod \"dns-operator-744455d44c-rgvgv\" (UID: \"30a44c68-0827-46b3-ba7c-5aabf5cb34b1\") " pod="openshift-dns-operator/dns-operator-744455d44c-rgvgv"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.135630 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b88c79b6-4760-4509-bee0-06de439c6ac2-service-ca\") pod \"console-f9d7485db-xv889\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") " pod="openshift-console/console-f9d7485db-xv889"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.135665 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9nkk\" (UniqueName: \"kubernetes.io/projected/1cbef097-04e8-4837-9fbb-f37d6b9dafb2-kube-api-access-q9nkk\") pod \"apiserver-7bbb656c7d-d4sxm\" (UID: \"1cbef097-04e8-4837-9fbb-f37d6b9dafb2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.135881 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f9f525e3-8aea-4e56-99ca-6a06cdafa897-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7twnf\" (UID: \"f9f525e3-8aea-4e56-99ca-6a06cdafa897\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.135917 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f9f525e3-8aea-4e56-99ca-6a06cdafa897-serving-cert\") pod \"controller-manager-879f6c89f-7twnf\" (UID: \"f9f525e3-8aea-4e56-99ca-6a06cdafa897\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.136869 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9c541bcd-d60a-4e73-8bcd-2d502eebbb9a-config\") pod \"console-operator-58897d9998-4m9lk\" (UID: \"9c541bcd-d60a-4e73-8bcd-2d502eebbb9a\") " pod="openshift-console-operator/console-operator-58897d9998-4m9lk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.139913 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/4d7c33e5-a79c-49f8-9d11-17fbe1879911-node-pullsecrets\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.139968 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9c541bcd-d60a-4e73-8bcd-2d502eebbb9a-trusted-ca\") pod \"console-operator-58897d9998-4m9lk\" (UID: \"9c541bcd-d60a-4e73-8bcd-2d502eebbb9a\") " pod="openshift-console-operator/console-operator-58897d9998-4m9lk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.140669 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/4d7c33e5-a79c-49f8-9d11-17fbe1879911-etcd-serving-ca\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.148244 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv"]
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.135944 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/044a062b-0f88-42e6-9d3a-7a74dfa51ca5-etcd-client\") pod \"etcd-operator-b45778765-q7hzk\" (UID: \"044a062b-0f88-42e6-9d3a-7a74dfa51ca5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q7hzk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.152827 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/30a44c68-0827-46b3-ba7c-5aabf5cb34b1-metrics-tls\") pod \"dns-operator-744455d44c-rgvgv\" (UID: \"30a44c68-0827-46b3-ba7c-5aabf5cb34b1\") " pod="openshift-dns-operator/dns-operator-744455d44c-rgvgv"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.152874 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b52fv\" (UniqueName: \"kubernetes.io/projected/d808dde0-4fdc-4d21-a6f2-3c27f540018f-kube-api-access-b52fv\") pod \"downloads-7954f5f757-hkzlk\" (UID: \"d808dde0-4fdc-4d21-a6f2-3c27f540018f\") " pod="openshift-console/downloads-7954f5f757-hkzlk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.152909 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d59a021-618b-40a7-b2b6-4013c084d74e-serving-cert\") pod \"openshift-config-operator-7777fb866f-sjmw5\" (UID: \"1d59a021-618b-40a7-b2b6-4013c084d74e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-sjmw5"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.152948 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/4d7c33e5-a79c-49f8-9d11-17fbe1879911-audit\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.152979 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5e25d774-afc2-4e37-9121-79f761e9e8d9-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-45lxk\" (UID: \"5e25d774-afc2-4e37-9121-79f761e9e8d9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-45lxk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153011 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4d7c33e5-a79c-49f8-9d11-17fbe1879911-audit-dir\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153045 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/044a062b-0f88-42e6-9d3a-7a74dfa51ca5-etcd-ca\") pod \"etcd-operator-b45778765-q7hzk\" (UID: \"044a062b-0f88-42e6-9d3a-7a74dfa51ca5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q7hzk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153078 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2a2c26ae-4f48-465f-8808-6bb1a443bd89-service-ca-bundle\") pod \"authentication-operator-69f744f599-fqjcd\" (UID: \"2a2c26ae-4f48-465f-8808-6bb1a443bd89\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fqjcd"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153111 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b88c79b6-4760-4509-bee0-06de439c6ac2-console-oauth-config\") pod \"console-f9d7485db-xv889\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") " pod="openshift-console/console-f9d7485db-xv889"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153142 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4d7c33e5-a79c-49f8-9d11-17fbe1879911-trusted-ca-bundle\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153172 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/044a062b-0f88-42e6-9d3a-7a74dfa51ca5-config\") pod \"etcd-operator-b45778765-q7hzk\" (UID: \"044a062b-0f88-42e6-9d3a-7a74dfa51ca5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q7hzk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153203 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fk6xl\" (UniqueName: \"kubernetes.io/projected/2a2c26ae-4f48-465f-8808-6bb1a443bd89-kube-api-access-fk6xl\") pod \"authentication-operator-69f744f599-fqjcd\" (UID: \"2a2c26ae-4f48-465f-8808-6bb1a443bd89\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fqjcd"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153239 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34ce612a-9777-4777-b6af-e98a53b3fb57-serving-cert\") pod \"route-controller-manager-6576b87f9c-t24dv\" (UID: \"34ce612a-9777-4777-b6af-e98a53b3fb57\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153387 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b88c79b6-4760-4509-bee0-06de439c6ac2-console-serving-cert\") pod \"console-f9d7485db-xv889\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") " pod="openshift-console/console-f9d7485db-xv889"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153442 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/34ce612a-9777-4777-b6af-e98a53b3fb57-client-ca\") pod \"route-controller-manager-6576b87f9c-t24dv\" (UID: \"34ce612a-9777-4777-b6af-e98a53b3fb57\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153474 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/fc878503-97d1-4f69-a607-1ccae9ca303a-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-njkt2\" (UID: \"fc878503-97d1-4f69-a607-1ccae9ca303a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-njkt2"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153516 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1cbef097-04e8-4837-9fbb-f37d6b9dafb2-audit-dir\") pod \"apiserver-7bbb656c7d-d4sxm\" (UID: \"1cbef097-04e8-4837-9fbb-f37d6b9dafb2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153536 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8s9lp\" (UniqueName: \"kubernetes.io/projected/34ce612a-9777-4777-b6af-e98a53b3fb57-kube-api-access-8s9lp\") pod \"route-controller-manager-6576b87f9c-t24dv\" (UID: \"34ce612a-9777-4777-b6af-e98a53b3fb57\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153553 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dd8l4\" (UniqueName: \"kubernetes.io/projected/5e25d774-afc2-4e37-9121-79f761e9e8d9-kube-api-access-dd8l4\") pod \"cluster-image-registry-operator-dc59b4c8b-45lxk\" (UID: \"5e25d774-afc2-4e37-9121-79f761e9e8d9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-45lxk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153574 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/648c3d09-edc5-43d1-acad-e3a36aa4c0d4-config\") pod \"machine-approver-56656f9798-86r58\" (UID: \"648c3d09-edc5-43d1-acad-e3a36aa4c0d4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86r58"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153592 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/1d59a021-618b-40a7-b2b6-4013c084d74e-available-featuregates\") pod \"openshift-config-operator-7777fb866f-sjmw5\" (UID: \"1d59a021-618b-40a7-b2b6-4013c084d74e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-sjmw5"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153609 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fkhrk\" (UniqueName: \"kubernetes.io/projected/1d59a021-618b-40a7-b2b6-4013c084d74e-kube-api-access-fkhrk\") pod \"openshift-config-operator-7777fb866f-sjmw5\" (UID: \"1d59a021-618b-40a7-b2b6-4013c084d74e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-sjmw5"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153629 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9c541bcd-d60a-4e73-8bcd-2d502eebbb9a-serving-cert\") pod \"console-operator-58897d9998-4m9lk\" (UID: \"9c541bcd-d60a-4e73-8bcd-2d502eebbb9a\") " pod="openshift-console-operator/console-operator-58897d9998-4m9lk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153650 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgvkd\" (UniqueName: \"kubernetes.io/projected/fc878503-97d1-4f69-a607-1ccae9ca303a-kube-api-access-pgvkd\") pod \"cluster-samples-operator-665b6dd947-njkt2\" (UID: \"fc878503-97d1-4f69-a607-1ccae9ca303a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-njkt2"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153696 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1cbef097-04e8-4837-9fbb-f37d6b9dafb2-etcd-client\") pod \"apiserver-7bbb656c7d-d4sxm\" (UID: \"1cbef097-04e8-4837-9fbb-f37d6b9dafb2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153719 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5e25d774-afc2-4e37-9121-79f761e9e8d9-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-45lxk\" (UID: \"5e25d774-afc2-4e37-9121-79f761e9e8d9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-45lxk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153740 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/648c3d09-edc5-43d1-acad-e3a36aa4c0d4-auth-proxy-config\") pod \"machine-approver-56656f9798-86r58\" (UID: \"648c3d09-edc5-43d1-acad-e3a36aa4c0d4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86r58"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153761 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f9f525e3-8aea-4e56-99ca-6a06cdafa897-client-ca\") pod \"controller-manager-879f6c89f-7twnf\" (UID: \"f9f525e3-8aea-4e56-99ca-6a06cdafa897\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153784 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d7c33e5-a79c-49f8-9d11-17fbe1879911-config\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153801 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4d7c33e5-a79c-49f8-9d11-17fbe1879911-encryption-config\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153820 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2a2c26ae-4f48-465f-8808-6bb1a443bd89-serving-cert\") pod \"authentication-operator-69f744f599-fqjcd\" (UID: \"2a2c26ae-4f48-465f-8808-6bb1a443bd89\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fqjcd"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153867 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cfa776fb-8b8d-4cd1-941a-57e2672afdeb-images\") pod \"machine-api-operator-5694c8668f-42qzk\" (UID: \"cfa776fb-8b8d-4cd1-941a-57e2672afdeb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-42qzk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153889 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4d7c33e5-a79c-49f8-9d11-17fbe1879911-serving-cert\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153908 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9f525e3-8aea-4e56-99ca-6a06cdafa897-config\") pod \"controller-manager-879f6c89f-7twnf\" (UID: \"f9f525e3-8aea-4e56-99ca-6a06cdafa897\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.153925 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/cfa776fb-8b8d-4cd1-941a-57e2672afdeb-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-42qzk\" (UID: \"cfa776fb-8b8d-4cd1-941a-57e2672afdeb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-42qzk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.149074 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/4d7c33e5-a79c-49f8-9d11-17fbe1879911-etcd-client\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.158344 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f9f525e3-8aea-4e56-99ca-6a06cdafa897-serving-cert\") pod \"controller-manager-879f6c89f-7twnf\" (UID: \"f9f525e3-8aea-4e56-99ca-6a06cdafa897\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.156147 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2kltf\" (UniqueName: \"kubernetes.io/projected/4d7c33e5-a79c-49f8-9d11-17fbe1879911-kube-api-access-2kltf\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.160828 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1cbef097-04e8-4837-9fbb-f37d6b9dafb2-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-d4sxm\" (UID: \"1cbef097-04e8-4837-9fbb-f37d6b9dafb2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.161750 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pblps\" (UniqueName: \"kubernetes.io/projected/b88c79b6-4760-4509-bee0-06de439c6ac2-kube-api-access-pblps\") pod \"console-f9d7485db-xv889\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") " pod="openshift-console/console-f9d7485db-xv889"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.164141 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/4d7c33e5-a79c-49f8-9d11-17fbe1879911-encryption-config\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.171166 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-xv889"]
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.171209 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-fqjcd"]
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.171218 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-q7hzk"]
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.171565 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.172812 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/fc878503-97d1-4f69-a607-1ccae9ca303a-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-njkt2\" (UID: \"fc878503-97d1-4f69-a607-1ccae9ca303a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-njkt2"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.172902 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f9f525e3-8aea-4e56-99ca-6a06cdafa897-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7twnf\" (UID: \"f9f525e3-8aea-4e56-99ca-6a06cdafa897\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.173173 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4d7c33e5-a79c-49f8-9d11-17fbe1879911-config\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.173363 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9f525e3-8aea-4e56-99ca-6a06cdafa897-config\") pod \"controller-manager-879f6c89f-7twnf\" (UID: \"f9f525e3-8aea-4e56-99ca-6a06cdafa897\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.174116 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f9f525e3-8aea-4e56-99ca-6a06cdafa897-client-ca\") pod \"controller-manager-879f6c89f-7twnf\" (UID: \"f9f525e3-8aea-4e56-99ca-6a06cdafa897\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.174213 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/4d7c33e5-a79c-49f8-9d11-17fbe1879911-audit-dir\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.174270 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1cbef097-04e8-4837-9fbb-f37d6b9dafb2-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-d4sxm\" (UID: \"1cbef097-04e8-4837-9fbb-f37d6b9dafb2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.174341 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/4d7c33e5-a79c-49f8-9d11-17fbe1879911-image-import-ca\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.174379 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twd8d\" (UniqueName: \"kubernetes.io/projected/9c541bcd-d60a-4e73-8bcd-2d502eebbb9a-kube-api-access-twd8d\") pod \"console-operator-58897d9998-4m9lk\" (UID: \"9c541bcd-d60a-4e73-8bcd-2d502eebbb9a\") " pod="openshift-console-operator/console-operator-58897d9998-4m9lk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.174397 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/044a062b-0f88-42e6-9d3a-7a74dfa51ca5-etcd-service-ca\") pod \"etcd-operator-b45778765-q7hzk\" (UID: \"044a062b-0f88-42e6-9d3a-7a74dfa51ca5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q7hzk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.174978 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/4d7c33e5-a79c-49f8-9d11-17fbe1879911-image-import-ca\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.176897 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4d7c33e5-a79c-49f8-9d11-17fbe1879911-trusted-ca-bundle\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.177674 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-sjmw5"]
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.178081 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bl9r"]
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.178138 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9c541bcd-d60a-4e73-8bcd-2d502eebbb9a-serving-cert\") pod \"console-operator-58897d9998-4m9lk\" (UID: \"9c541bcd-d60a-4e73-8bcd-2d502eebbb9a\") " pod="openshift-console-operator/console-operator-58897d9998-4m9lk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.178638 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/4d7c33e5-a79c-49f8-9d11-17fbe1879911-audit\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.179665 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4d7c33e5-a79c-49f8-9d11-17fbe1879911-serving-cert\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.180959 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-rgvgv"]
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.187926 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2kltf\" (UniqueName: \"kubernetes.io/projected/4d7c33e5-a79c-49f8-9d11-17fbe1879911-kube-api-access-2kltf\") pod \"apiserver-76f77b778f-vl66w\" (UID: \"4d7c33e5-a79c-49f8-9d11-17fbe1879911\") " pod="openshift-apiserver/apiserver-76f77b778f-vl66w"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.188652 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xjcmx\" (UniqueName: \"kubernetes.io/projected/f9f525e3-8aea-4e56-99ca-6a06cdafa897-kube-api-access-xjcmx\") pod \"controller-manager-879f6c89f-7twnf\" (UID: \"f9f525e3-8aea-4e56-99ca-6a06cdafa897\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.192219 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgvkd\" (UniqueName: \"kubernetes.io/projected/fc878503-97d1-4f69-a607-1ccae9ca303a-kube-api-access-pgvkd\") pod \"cluster-samples-operator-665b6dd947-njkt2\" (UID: \"fc878503-97d1-4f69-a607-1ccae9ca303a\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-njkt2"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.192807 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-42qzk"]
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.194416 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7twnf"]
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.195959 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-45lxk"]
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.197438 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm"]
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.198875 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-hkzlk"]
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.201348 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-vl66w"]
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.275586 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/044a062b-0f88-42e6-9d3a-7a74dfa51ca5-etcd-ca\") pod \"etcd-operator-b45778765-q7hzk\" (UID: \"044a062b-0f88-42e6-9d3a-7a74dfa51ca5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q7hzk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.275619 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2a2c26ae-4f48-465f-8808-6bb1a443bd89-service-ca-bundle\") pod \"authentication-operator-69f744f599-fqjcd\" (UID: \"2a2c26ae-4f48-465f-8808-6bb1a443bd89\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fqjcd"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.275638 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b88c79b6-4760-4509-bee0-06de439c6ac2-console-oauth-config\") pod \"console-f9d7485db-xv889\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") " pod="openshift-console/console-f9d7485db-xv889"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.275661 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/044a062b-0f88-42e6-9d3a-7a74dfa51ca5-config\") pod \"etcd-operator-b45778765-q7hzk\" (UID: \"044a062b-0f88-42e6-9d3a-7a74dfa51ca5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q7hzk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.275675 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fk6xl\" (UniqueName: \"kubernetes.io/projected/2a2c26ae-4f48-465f-8808-6bb1a443bd89-kube-api-access-fk6xl\") pod \"authentication-operator-69f744f599-fqjcd\" (UID: \"2a2c26ae-4f48-465f-8808-6bb1a443bd89\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fqjcd"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.275691 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34ce612a-9777-4777-b6af-e98a53b3fb57-serving-cert\") pod \"route-controller-manager-6576b87f9c-t24dv\" (UID: \"34ce612a-9777-4777-b6af-e98a53b3fb57\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.275709 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b88c79b6-4760-4509-bee0-06de439c6ac2-console-serving-cert\") pod \"console-f9d7485db-xv889\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") " pod="openshift-console/console-f9d7485db-xv889"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.275722 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/34ce612a-9777-4777-b6af-e98a53b3fb57-client-ca\") pod \"route-controller-manager-6576b87f9c-t24dv\" (UID: \"34ce612a-9777-4777-b6af-e98a53b3fb57\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.275737 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/648c3d09-edc5-43d1-acad-e3a36aa4c0d4-config\") pod \"machine-approver-56656f9798-86r58\" (UID: \"648c3d09-edc5-43d1-acad-e3a36aa4c0d4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86r58"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.275763 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1cbef097-04e8-4837-9fbb-f37d6b9dafb2-audit-dir\") pod \"apiserver-7bbb656c7d-d4sxm\" (UID: \"1cbef097-04e8-4837-9fbb-f37d6b9dafb2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.275777 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8s9lp\" (UniqueName: \"kubernetes.io/projected/34ce612a-9777-4777-b6af-e98a53b3fb57-kube-api-access-8s9lp\") pod \"route-controller-manager-6576b87f9c-t24dv\" (UID: \"34ce612a-9777-4777-b6af-e98a53b3fb57\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.275793 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dd8l4\" (UniqueName: \"kubernetes.io/projected/5e25d774-afc2-4e37-9121-79f761e9e8d9-kube-api-access-dd8l4\") pod \"cluster-image-registry-operator-dc59b4c8b-45lxk\" (UID: \"5e25d774-afc2-4e37-9121-79f761e9e8d9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-45lxk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.275809 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/1d59a021-618b-40a7-b2b6-4013c084d74e-available-featuregates\") pod \"openshift-config-operator-7777fb866f-sjmw5\" (UID: \"1d59a021-618b-40a7-b2b6-4013c084d74e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-sjmw5"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.275826 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fkhrk\" (UniqueName: \"kubernetes.io/projected/1d59a021-618b-40a7-b2b6-4013c084d74e-kube-api-access-fkhrk\") pod \"openshift-config-operator-7777fb866f-sjmw5\" (UID: \"1d59a021-618b-40a7-b2b6-4013c084d74e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-sjmw5"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.275850 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1cbef097-04e8-4837-9fbb-f37d6b9dafb2-etcd-client\") pod \"apiserver-7bbb656c7d-d4sxm\" (UID: \"1cbef097-04e8-4837-9fbb-f37d6b9dafb2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.275865 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5e25d774-afc2-4e37-9121-79f761e9e8d9-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-45lxk\" (UID: \"5e25d774-afc2-4e37-9121-79f761e9e8d9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-45lxk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.275881 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/648c3d09-edc5-43d1-acad-e3a36aa4c0d4-auth-proxy-config\") pod \"machine-approver-56656f9798-86r58\" (UID: \"648c3d09-edc5-43d1-acad-e3a36aa4c0d4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86r58"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.275937 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2a2c26ae-4f48-465f-8808-6bb1a443bd89-serving-cert\") pod \"authentication-operator-69f744f599-fqjcd\" (UID: \"2a2c26ae-4f48-465f-8808-6bb1a443bd89\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fqjcd"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.275971 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cfa776fb-8b8d-4cd1-941a-57e2672afdeb-images\") pod \"machine-api-operator-5694c8668f-42qzk\" (UID: \"cfa776fb-8b8d-4cd1-941a-57e2672afdeb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-42qzk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.275995 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/cfa776fb-8b8d-4cd1-941a-57e2672afdeb-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-42qzk\" (UID: \"cfa776fb-8b8d-4cd1-941a-57e2672afdeb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-42qzk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276016 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1cbef097-04e8-4837-9fbb-f37d6b9dafb2-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-d4sxm\" (UID: \"1cbef097-04e8-4837-9fbb-f37d6b9dafb2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276030 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pblps\" (UniqueName: \"kubernetes.io/projected/b88c79b6-4760-4509-bee0-06de439c6ac2-kube-api-access-pblps\") pod \"console-f9d7485db-xv889\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") " pod="openshift-console/console-f9d7485db-xv889"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276045 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1cbef097-04e8-4837-9fbb-f37d6b9dafb2-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-d4sxm\" (UID: \"1cbef097-04e8-4837-9fbb-f37d6b9dafb2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276071 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/044a062b-0f88-42e6-9d3a-7a74dfa51ca5-etcd-service-ca\") pod \"etcd-operator-b45778765-q7hzk\" (UID: \"044a062b-0f88-42e6-9d3a-7a74dfa51ca5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q7hzk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276097 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/948ea842-e99b-4bb1-88df-b58ad8c75e31-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-7bl9r\" (UID: \"948ea842-e99b-4bb1-88df-b58ad8c75e31\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bl9r"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276112 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b88c79b6-4760-4509-bee0-06de439c6ac2-console-config\") pod \"console-f9d7485db-xv889\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") " pod="openshift-console/console-f9d7485db-xv889"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276128 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b88c79b6-4760-4509-bee0-06de439c6ac2-trusted-ca-bundle\") pod \"console-f9d7485db-xv889\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") " pod="openshift-console/console-f9d7485db-xv889"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276144 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a2c26ae-4f48-465f-8808-6bb1a443bd89-config\") pod \"authentication-operator-69f744f599-fqjcd\" (UID: \"2a2c26ae-4f48-465f-8808-6bb1a443bd89\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fqjcd"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276160 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfa776fb-8b8d-4cd1-941a-57e2672afdeb-config\") pod \"machine-api-operator-5694c8668f-42qzk\" (UID: \"cfa776fb-8b8d-4cd1-941a-57e2672afdeb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-42qzk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276175 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/648c3d09-edc5-43d1-acad-e3a36aa4c0d4-machine-approver-tls\") pod \"machine-approver-56656f9798-86r58\" (UID: \"648c3d09-edc5-43d1-acad-e3a36aa4c0d4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86r58"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276190 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/044a062b-0f88-42e6-9d3a-7a74dfa51ca5-serving-cert\") pod \"etcd-operator-b45778765-q7hzk\" (UID: \"044a062b-0f88-42e6-9d3a-7a74dfa51ca5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q7hzk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276204 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6x7b9\" (UniqueName: \"kubernetes.io/projected/044a062b-0f88-42e6-9d3a-7a74dfa51ca5-kube-api-access-6x7b9\") pod \"etcd-operator-b45778765-q7hzk\" (UID: \"044a062b-0f88-42e6-9d3a-7a74dfa51ca5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q7hzk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276218 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/948ea842-e99b-4bb1-88df-b58ad8c75e31-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-7bl9r\" (UID: \"948ea842-e99b-4bb1-88df-b58ad8c75e31\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bl9r"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276233 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/5e25d774-afc2-4e37-9121-79f761e9e8d9-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-45lxk\" (UID: \"5e25d774-afc2-4e37-9121-79f761e9e8d9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-45lxk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276249 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwdtb\" (UniqueName: \"kubernetes.io/projected/cfa776fb-8b8d-4cd1-941a-57e2672afdeb-kube-api-access-xwdtb\") pod \"machine-api-operator-5694c8668f-42qzk\" (UID: \"cfa776fb-8b8d-4cd1-941a-57e2672afdeb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-42qzk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276273 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1cbef097-04e8-4837-9fbb-f37d6b9dafb2-serving-cert\") pod \"apiserver-7bbb656c7d-d4sxm\" (UID: \"1cbef097-04e8-4837-9fbb-f37d6b9dafb2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276287 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2a2c26ae-4f48-465f-8808-6bb1a443bd89-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-fqjcd\" (UID: \"2a2c26ae-4f48-465f-8808-6bb1a443bd89\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fqjcd"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276318 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1cbef097-04e8-4837-9fbb-f37d6b9dafb2-encryption-config\") pod \"apiserver-7bbb656c7d-d4sxm\" (UID: \"1cbef097-04e8-4837-9fbb-f37d6b9dafb2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276334 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1cbef097-04e8-4837-9fbb-f37d6b9dafb2-audit-policies\") pod \"apiserver-7bbb656c7d-d4sxm\" (UID: \"1cbef097-04e8-4837-9fbb-f37d6b9dafb2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276348 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b88c79b6-4760-4509-bee0-06de439c6ac2-oauth-serving-cert\") pod \"console-f9d7485db-xv889\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") " pod="openshift-console/console-f9d7485db-xv889"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276363 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34ce612a-9777-4777-b6af-e98a53b3fb57-config\") pod \"route-controller-manager-6576b87f9c-t24dv\" (UID: \"34ce612a-9777-4777-b6af-e98a53b3fb57\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276379 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8p8sn\" (UniqueName: \"kubernetes.io/projected/648c3d09-edc5-43d1-acad-e3a36aa4c0d4-kube-api-access-8p8sn\") pod \"machine-approver-56656f9798-86r58\" (UID: \"648c3d09-edc5-43d1-acad-e3a36aa4c0d4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86r58"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276395 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b62hk\" (UniqueName: \"kubernetes.io/projected/30a44c68-0827-46b3-ba7c-5aabf5cb34b1-kube-api-access-b62hk\") pod \"dns-operator-744455d44c-rgvgv\" (UID: \"30a44c68-0827-46b3-ba7c-5aabf5cb34b1\") " pod="openshift-dns-operator/dns-operator-744455d44c-rgvgv"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276425 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8j6mr\" (UniqueName: \"kubernetes.io/projected/948ea842-e99b-4bb1-88df-b58ad8c75e31-kube-api-access-8j6mr\") pod \"openshift-controller-manager-operator-756b6f6bc6-7bl9r\" (UID: \"948ea842-e99b-4bb1-88df-b58ad8c75e31\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bl9r"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276446 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b88c79b6-4760-4509-bee0-06de439c6ac2-service-ca\") pod \"console-f9d7485db-xv889\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") " pod="openshift-console/console-f9d7485db-xv889"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.276462 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9nkk\" (UniqueName: \"kubernetes.io/projected/1cbef097-04e8-4837-9fbb-f37d6b9dafb2-kube-api-access-q9nkk\") pod \"apiserver-7bbb656c7d-d4sxm\" (UID: \"1cbef097-04e8-4837-9fbb-f37d6b9dafb2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.277045 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1cbef097-04e8-4837-9fbb-f37d6b9dafb2-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-d4sxm\" (UID: \"1cbef097-04e8-4837-9fbb-f37d6b9dafb2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.277133 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/1d59a021-618b-40a7-b2b6-4013c084d74e-available-featuregates\") pod \"openshift-config-operator-7777fb866f-sjmw5\" (UID: \"1d59a021-618b-40a7-b2b6-4013c084d74e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-sjmw5"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.277508 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/044a062b-0f88-42e6-9d3a-7a74dfa51ca5-etcd-client\") pod \"etcd-operator-b45778765-q7hzk\" (UID: \"044a062b-0f88-42e6-9d3a-7a74dfa51ca5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q7hzk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.277543 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/30a44c68-0827-46b3-ba7c-5aabf5cb34b1-metrics-tls\") pod \"dns-operator-744455d44c-rgvgv\" (UID: \"30a44c68-0827-46b3-ba7c-5aabf5cb34b1\") " pod="openshift-dns-operator/dns-operator-744455d44c-rgvgv"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.277562 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b52fv\" (UniqueName: \"kubernetes.io/projected/d808dde0-4fdc-4d21-a6f2-3c27f540018f-kube-api-access-b52fv\") pod \"downloads-7954f5f757-hkzlk\" (UID: \"d808dde0-4fdc-4d21-a6f2-3c27f540018f\") " pod="openshift-console/downloads-7954f5f757-hkzlk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.277583 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d59a021-618b-40a7-b2b6-4013c084d74e-serving-cert\") pod \"openshift-config-operator-7777fb866f-sjmw5\" (UID: \"1d59a021-618b-40a7-b2b6-4013c084d74e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-sjmw5"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.277587 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/044a062b-0f88-42e6-9d3a-7a74dfa51ca5-etcd-ca\") pod \"etcd-operator-b45778765-q7hzk\" (UID: \"044a062b-0f88-42e6-9d3a-7a74dfa51ca5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q7hzk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.277623 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5e25d774-afc2-4e37-9121-79f761e9e8d9-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-45lxk\" (UID: \"5e25d774-afc2-4e37-9121-79f761e9e8d9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-45lxk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.278165 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1cbef097-04e8-4837-9fbb-f37d6b9dafb2-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-d4sxm\" (UID: \"1cbef097-04e8-4837-9fbb-f37d6b9dafb2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.278414 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2a2c26ae-4f48-465f-8808-6bb1a443bd89-service-ca-bundle\") pod \"authentication-operator-69f744f599-fqjcd\" (UID: \"2a2c26ae-4f48-465f-8808-6bb1a443bd89\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fqjcd"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.278790 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/044a062b-0f88-42e6-9d3a-7a74dfa51ca5-etcd-service-ca\") pod \"etcd-operator-b45778765-q7hzk\" (UID: \"044a062b-0f88-42e6-9d3a-7a74dfa51ca5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q7hzk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.279607 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5e25d774-afc2-4e37-9121-79f761e9e8d9-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-45lxk\" (UID: \"5e25d774-afc2-4e37-9121-79f761e9e8d9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-45lxk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.281623 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2a2c26ae-4f48-465f-8808-6bb1a443bd89-serving-cert\") pod \"authentication-operator-69f744f599-fqjcd\" (UID: \"2a2c26ae-4f48-465f-8808-6bb1a443bd89\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fqjcd"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.282586 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1cbef097-04e8-4837-9fbb-f37d6b9dafb2-audit-policies\") pod \"apiserver-7bbb656c7d-d4sxm\" (UID: \"1cbef097-04e8-4837-9fbb-f37d6b9dafb2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.282841 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b88c79b6-4760-4509-bee0-06de439c6ac2-oauth-serving-cert\") pod \"console-f9d7485db-xv889\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") " pod="openshift-console/console-f9d7485db-xv889"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.282940 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/044a062b-0f88-42e6-9d3a-7a74dfa51ca5-serving-cert\") pod \"etcd-operator-b45778765-q7hzk\" (UID: \"044a062b-0f88-42e6-9d3a-7a74dfa51ca5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q7hzk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.283212 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1cbef097-04e8-4837-9fbb-f37d6b9dafb2-audit-dir\") pod \"apiserver-7bbb656c7d-d4sxm\" (UID: \"1cbef097-04e8-4837-9fbb-f37d6b9dafb2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.283790 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/cfa776fb-8b8d-4cd1-941a-57e2672afdeb-images\") pod \"machine-api-operator-5694c8668f-42qzk\" (UID: \"cfa776fb-8b8d-4cd1-941a-57e2672afdeb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-42qzk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.283879 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/2a2c26ae-4f48-465f-8808-6bb1a443bd89-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-fqjcd\" (UID: \"2a2c26ae-4f48-465f-8808-6bb1a443bd89\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fqjcd"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.284340 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34ce612a-9777-4777-b6af-e98a53b3fb57-config\") pod \"route-controller-manager-6576b87f9c-t24dv\" (UID: \"34ce612a-9777-4777-b6af-e98a53b3fb57\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.284405 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b88c79b6-4760-4509-bee0-06de439c6ac2-console-config\") pod \"console-f9d7485db-xv889\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") " pod="openshift-console/console-f9d7485db-xv889"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.284452 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b88c79b6-4760-4509-bee0-06de439c6ac2-service-ca\") pod \"console-f9d7485db-xv889\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") " pod="openshift-console/console-f9d7485db-xv889"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.284938 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/648c3d09-edc5-43d1-acad-e3a36aa4c0d4-auth-proxy-config\") pod \"machine-approver-56656f9798-86r58\" (UID: \"648c3d09-edc5-43d1-acad-e3a36aa4c0d4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86r58"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.285281 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\"
(UniqueName: \"kubernetes.io/configmap/948ea842-e99b-4bb1-88df-b58ad8c75e31-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-7bl9r\" (UID: \"948ea842-e99b-4bb1-88df-b58ad8c75e31\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bl9r" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.285517 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/34ce612a-9777-4777-b6af-e98a53b3fb57-client-ca\") pod \"route-controller-manager-6576b87f9c-t24dv\" (UID: \"34ce612a-9777-4777-b6af-e98a53b3fb57\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.286081 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2a2c26ae-4f48-465f-8808-6bb1a443bd89-config\") pod \"authentication-operator-69f744f599-fqjcd\" (UID: \"2a2c26ae-4f48-465f-8808-6bb1a443bd89\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fqjcd" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.286103 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/cfa776fb-8b8d-4cd1-941a-57e2672afdeb-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-42qzk\" (UID: \"cfa776fb-8b8d-4cd1-941a-57e2672afdeb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-42qzk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.286251 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cfa776fb-8b8d-4cd1-941a-57e2672afdeb-config\") pod \"machine-api-operator-5694c8668f-42qzk\" (UID: \"cfa776fb-8b8d-4cd1-941a-57e2672afdeb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-42qzk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.286318 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/648c3d09-edc5-43d1-acad-e3a36aa4c0d4-config\") pod \"machine-approver-56656f9798-86r58\" (UID: \"648c3d09-edc5-43d1-acad-e3a36aa4c0d4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86r58" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.286362 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1cbef097-04e8-4837-9fbb-f37d6b9dafb2-etcd-client\") pod \"apiserver-7bbb656c7d-d4sxm\" (UID: \"1cbef097-04e8-4837-9fbb-f37d6b9dafb2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.286472 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b88c79b6-4760-4509-bee0-06de439c6ac2-trusted-ca-bundle\") pod \"console-f9d7485db-xv889\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") " pod="openshift-console/console-f9d7485db-xv889" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.286664 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/5e25d774-afc2-4e37-9121-79f761e9e8d9-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-45lxk\" (UID: \"5e25d774-afc2-4e37-9121-79f761e9e8d9\") " 
pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-45lxk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.286687 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b88c79b6-4760-4509-bee0-06de439c6ac2-console-oauth-config\") pod \"console-f9d7485db-xv889\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") " pod="openshift-console/console-f9d7485db-xv889" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.287483 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/948ea842-e99b-4bb1-88df-b58ad8c75e31-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-7bl9r\" (UID: \"948ea842-e99b-4bb1-88df-b58ad8c75e31\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bl9r" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.287617 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/044a062b-0f88-42e6-9d3a-7a74dfa51ca5-config\") pod \"etcd-operator-b45778765-q7hzk\" (UID: \"044a062b-0f88-42e6-9d3a-7a74dfa51ca5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q7hzk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.287858 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/648c3d09-edc5-43d1-acad-e3a36aa4c0d4-machine-approver-tls\") pod \"machine-approver-56656f9798-86r58\" (UID: \"648c3d09-edc5-43d1-acad-e3a36aa4c0d4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86r58" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.288050 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1cbef097-04e8-4837-9fbb-f37d6b9dafb2-encryption-config\") pod \"apiserver-7bbb656c7d-d4sxm\" (UID: \"1cbef097-04e8-4837-9fbb-f37d6b9dafb2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.288139 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/30a44c68-0827-46b3-ba7c-5aabf5cb34b1-metrics-tls\") pod \"dns-operator-744455d44c-rgvgv\" (UID: \"30a44c68-0827-46b3-ba7c-5aabf5cb34b1\") " pod="openshift-dns-operator/dns-operator-744455d44c-rgvgv" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.288564 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/044a062b-0f88-42e6-9d3a-7a74dfa51ca5-etcd-client\") pod \"etcd-operator-b45778765-q7hzk\" (UID: \"044a062b-0f88-42e6-9d3a-7a74dfa51ca5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q7hzk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.289260 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1d59a021-618b-40a7-b2b6-4013c084d74e-serving-cert\") pod \"openshift-config-operator-7777fb866f-sjmw5\" (UID: \"1d59a021-618b-40a7-b2b6-4013c084d74e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-sjmw5" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.291101 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/1cbef097-04e8-4837-9fbb-f37d6b9dafb2-serving-cert\") pod \"apiserver-7bbb656c7d-d4sxm\" (UID: \"1cbef097-04e8-4837-9fbb-f37d6b9dafb2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.291704 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b88c79b6-4760-4509-bee0-06de439c6ac2-console-serving-cert\") pod \"console-f9d7485db-xv889\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") " pod="openshift-console/console-f9d7485db-xv889" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.292719 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34ce612a-9777-4777-b6af-e98a53b3fb57-serving-cert\") pod \"route-controller-manager-6576b87f9c-t24dv\" (UID: \"34ce612a-9777-4777-b6af-e98a53b3fb57\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.315734 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dd8l4\" (UniqueName: \"kubernetes.io/projected/5e25d774-afc2-4e37-9121-79f761e9e8d9-kube-api-access-dd8l4\") pod \"cluster-image-registry-operator-dc59b4c8b-45lxk\" (UID: \"5e25d774-afc2-4e37-9121-79f761e9e8d9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-45lxk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.328885 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.336843 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fkhrk\" (UniqueName: \"kubernetes.io/projected/1d59a021-618b-40a7-b2b6-4013c084d74e-kube-api-access-fkhrk\") pod \"openshift-config-operator-7777fb866f-sjmw5\" (UID: \"1d59a021-618b-40a7-b2b6-4013c084d74e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-sjmw5" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.356902 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5e25d774-afc2-4e37-9121-79f761e9e8d9-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-45lxk\" (UID: \"5e25d774-afc2-4e37-9121-79f761e9e8d9\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-45lxk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.376594 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pblps\" (UniqueName: \"kubernetes.io/projected/b88c79b6-4760-4509-bee0-06de439c6ac2-kube-api-access-pblps\") pod \"console-f9d7485db-xv889\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") " pod="openshift-console/console-f9d7485db-xv889" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.396740 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xwdtb\" (UniqueName: \"kubernetes.io/projected/cfa776fb-8b8d-4cd1-941a-57e2672afdeb-kube-api-access-xwdtb\") pod \"machine-api-operator-5694c8668f-42qzk\" (UID: \"cfa776fb-8b8d-4cd1-941a-57e2672afdeb\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-42qzk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.414900 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fk6xl\" 
(UniqueName: \"kubernetes.io/projected/2a2c26ae-4f48-465f-8808-6bb1a443bd89-kube-api-access-fk6xl\") pod \"authentication-operator-69f744f599-fqjcd\" (UID: \"2a2c26ae-4f48-465f-8808-6bb1a443bd89\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-fqjcd" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.419859 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-njkt2" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.428997 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-vl66w" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.436640 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6x7b9\" (UniqueName: \"kubernetes.io/projected/044a062b-0f88-42e6-9d3a-7a74dfa51ca5-kube-api-access-6x7b9\") pod \"etcd-operator-b45778765-q7hzk\" (UID: \"044a062b-0f88-42e6-9d3a-7a74dfa51ca5\") " pod="openshift-etcd-operator/etcd-operator-b45778765-q7hzk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.458212 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-qgtfk"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.458894 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-7wl87"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.459346 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-7wl87" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.460394 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-qgtfk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.475018 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-xpmr7"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.476214 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bdr9q"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.476893 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bdr9q" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.477238 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xpmr7" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.481723 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zq8h8"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.483101 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zq8h8" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.486923 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4wbqn"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.489957 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4wbqn" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.492170 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-snslb"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.498764 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-fqjcd" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.506686 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7qh5v"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.507593 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-snslb" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.509190 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x8jz5"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.511753 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7qh5v" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.513452 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-thm7j"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.513905 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x8jz5" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.543566 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9nkk\" (UniqueName: \"kubernetes.io/projected/1cbef097-04e8-4837-9fbb-f37d6b9dafb2-kube-api-access-q9nkk\") pod \"apiserver-7bbb656c7d-d4sxm\" (UID: \"1cbef097-04e8-4837-9fbb-f37d6b9dafb2\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.549804 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8s9lp\" (UniqueName: \"kubernetes.io/projected/34ce612a-9777-4777-b6af-e98a53b3fb57-kube-api-access-8s9lp\") pod \"route-controller-manager-6576b87f9c-t24dv\" (UID: \"34ce612a-9777-4777-b6af-e98a53b3fb57\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.550010 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-xv889" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.550086 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-g5zmc"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.550610 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-6bhrf"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.550992 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.551262 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-thm7j" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.551418 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g5zmc" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.551852 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8p8sn\" (UniqueName: \"kubernetes.io/projected/648c3d09-edc5-43d1-acad-e3a36aa4c0d4-kube-api-access-8p8sn\") pod \"machine-approver-56656f9798-86r58\" (UID: \"648c3d09-edc5-43d1-acad-e3a36aa4c0d4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86r58" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.551912 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-sf489"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.552751 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tqfg7"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.554623 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bdr9q"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.552784 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-sf489" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.558059 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9bxhg"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.559513 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.563502 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9bxhg" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.565787 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-xpmr7"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.567932 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-sjmw5" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.568127 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zq8h8"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.568200 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-42qzk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.568475 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-q7hzk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.568495 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-45lxk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.573438 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7qh5v"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.574710 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-7lq4t"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.575247 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-7lq4t" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.576350 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-5jczg"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.577246 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-5jczg" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.578397 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-h4gnf"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.579026 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h4gnf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.579387 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-6swmr"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.580289 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-6swmr" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.581358 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8j6mr\" (UniqueName: \"kubernetes.io/projected/948ea842-e99b-4bb1-88df-b58ad8c75e31-kube-api-access-8j6mr\") pod \"openshift-controller-manager-operator-756b6f6bc6-7bl9r\" (UID: \"948ea842-e99b-4bb1-88df-b58ad8c75e31\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bl9r" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.582565 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-k5gsl"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.583145 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-k5gsl" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.584173 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-794gc"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.585180 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-794gc" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.587955 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.588516 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x8jz5"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.588663 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.589390 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2478a25b-a8b7-44b5-8204-5862f15fb53d-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-bdr9q\" (UID: \"2478a25b-a8b7-44b5-8204-5862f15fb53d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bdr9q" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.589495 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/191296ac-80a8-4b64-acf8-d0087ac08c79-default-certificate\") pod \"router-default-5444994796-7wl87\" (UID: \"191296ac-80a8-4b64-acf8-d0087ac08c79\") " pod="openshift-ingress/router-default-5444994796-7wl87" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.589582 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/6c57b974-85bf-41ea-83aa-096fe4228a87-profile-collector-cert\") pod \"catalog-operator-68c6474976-thm7j\" (UID: \"6c57b974-85bf-41ea-83aa-096fe4228a87\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-thm7j" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.589661 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8l8b\" (UniqueName: \"kubernetes.io/projected/10397df3-a399-4278-8909-538e0b8c3c01-kube-api-access-z8l8b\") pod \"openshift-apiserver-operator-796bbdcf4f-4wbqn\" (UID: \"10397df3-a399-4278-8909-538e0b8c3c01\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4wbqn" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.589742 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/191296ac-80a8-4b64-acf8-d0087ac08c79-service-ca-bundle\") pod \"router-default-5444994796-7wl87\" (UID: \"191296ac-80a8-4b64-acf8-d0087ac08c79\") " pod="openshift-ingress/router-default-5444994796-7wl87" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.589814 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c9sx8\" (UniqueName: \"kubernetes.io/projected/191296ac-80a8-4b64-acf8-d0087ac08c79-kube-api-access-c9sx8\") pod \"router-default-5444994796-7wl87\" (UID: \"191296ac-80a8-4b64-acf8-d0087ac08c79\") " pod="openshift-ingress/router-default-5444994796-7wl87" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.589885 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90d5f8b3-804d-4f32-81b9-230301d5a834-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zq8h8\" (UID: \"90d5f8b3-804d-4f32-81b9-230301d5a834\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zq8h8" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.589978 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7bbd42a4-d665-4950-89b1-ff6c53ac0b60-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-x8jz5\" (UID: \"7bbd42a4-d665-4950-89b1-ff6c53ac0b60\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x8jz5" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.590057 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fnpfl\" (UniqueName: \"kubernetes.io/projected/6c57b974-85bf-41ea-83aa-096fe4228a87-kube-api-access-fnpfl\") pod \"catalog-operator-68c6474976-thm7j\" (UID: \"6c57b974-85bf-41ea-83aa-096fe4228a87\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-thm7j" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.590141 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/191296ac-80a8-4b64-acf8-d0087ac08c79-metrics-certs\") pod \"router-default-5444994796-7wl87\" (UID: \"191296ac-80a8-4b64-acf8-d0087ac08c79\") " pod="openshift-ingress/router-default-5444994796-7wl87" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.590220 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2595c30a-ace8-4674-8691-4353a9e55c46-bound-sa-token\") pod \"ingress-operator-5b745b69d9-xpmr7\" (UID: \"2595c30a-ace8-4674-8691-4353a9e55c46\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xpmr7" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.590325 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7bbd42a4-d665-4950-89b1-ff6c53ac0b60-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-x8jz5\" (UID: \"7bbd42a4-d665-4950-89b1-ff6c53ac0b60\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x8jz5" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.590402 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6ae00a40-d6ad-4d9d-9eca-8c09a9818801-config-volume\") pod \"dns-default-qgtfk\" (UID: \"6ae00a40-d6ad-4d9d-9eca-8c09a9818801\") " pod="openshift-dns/dns-default-qgtfk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.590478 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/191296ac-80a8-4b64-acf8-d0087ac08c79-stats-auth\") pod \"router-default-5444994796-7wl87\" (UID: \"191296ac-80a8-4b64-acf8-d0087ac08c79\") " pod="openshift-ingress/router-default-5444994796-7wl87" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.590548 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/6afac4e7-1280-46f1-9571-a8aaad37b32f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-7qh5v\" (UID: \"6afac4e7-1280-46f1-9571-a8aaad37b32f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7qh5v" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.590618 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10397df3-a399-4278-8909-538e0b8c3c01-config\") pod \"openshift-apiserver-operator-796bbdcf4f-4wbqn\" (UID: \"10397df3-a399-4278-8909-538e0b8c3c01\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4wbqn" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.590685 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znkf5\" (UniqueName: \"kubernetes.io/projected/6afac4e7-1280-46f1-9571-a8aaad37b32f-kube-api-access-znkf5\") pod \"kube-storage-version-migrator-operator-b67b599dd-7qh5v\" (UID: \"6afac4e7-1280-46f1-9571-a8aaad37b32f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7qh5v" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.590765 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/10397df3-a399-4278-8909-538e0b8c3c01-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-4wbqn\" (UID: \"10397df3-a399-4278-8909-538e0b8c3c01\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4wbqn" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.590836 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7bbd42a4-d665-4950-89b1-ff6c53ac0b60-config\") pod \"kube-controller-manager-operator-78b949d7b-x8jz5\" (UID: \"7bbd42a4-d665-4950-89b1-ff6c53ac0b60\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x8jz5" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.590923 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2478a25b-a8b7-44b5-8204-5862f15fb53d-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-bdr9q\" (UID: \"2478a25b-a8b7-44b5-8204-5862f15fb53d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bdr9q" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.590994 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxthd\" (UniqueName: \"kubernetes.io/projected/6ae00a40-d6ad-4d9d-9eca-8c09a9818801-kube-api-access-zxthd\") pod \"dns-default-qgtfk\" (UID: \"6ae00a40-d6ad-4d9d-9eca-8c09a9818801\") " pod="openshift-dns/dns-default-qgtfk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.591073 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbn7g\" (UniqueName: \"kubernetes.io/projected/2595c30a-ace8-4674-8691-4353a9e55c46-kube-api-access-kbn7g\") pod \"ingress-operator-5b745b69d9-xpmr7\" (UID: \"2595c30a-ace8-4674-8691-4353a9e55c46\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xpmr7" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 
01:12:49.591146 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/90d5f8b3-804d-4f32-81b9-230301d5a834-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zq8h8\" (UID: \"90d5f8b3-804d-4f32-81b9-230301d5a834\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zq8h8" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.591227 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/6c57b974-85bf-41ea-83aa-096fe4228a87-srv-cert\") pod \"catalog-operator-68c6474976-thm7j\" (UID: \"6c57b974-85bf-41ea-83aa-096fe4228a87\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-thm7j" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.591319 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/90d5f8b3-804d-4f32-81b9-230301d5a834-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zq8h8\" (UID: \"90d5f8b3-804d-4f32-81b9-230301d5a834\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zq8h8" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.591395 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dznf7\" (UniqueName: \"kubernetes.io/projected/7d763d3d-4925-4c67-8828-873c9a8dc973-kube-api-access-dznf7\") pod \"migrator-59844c95c7-snslb\" (UID: \"7d763d3d-4925-4c67-8828-873c9a8dc973\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-snslb" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.591470 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6ae00a40-d6ad-4d9d-9eca-8c09a9818801-metrics-tls\") pod \"dns-default-qgtfk\" (UID: \"6ae00a40-d6ad-4d9d-9eca-8c09a9818801\") " pod="openshift-dns/dns-default-qgtfk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.591650 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6afac4e7-1280-46f1-9571-a8aaad37b32f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-7qh5v\" (UID: \"6afac4e7-1280-46f1-9571-a8aaad37b32f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7qh5v" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.591811 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2595c30a-ace8-4674-8691-4353a9e55c46-trusted-ca\") pod \"ingress-operator-5b745b69d9-xpmr7\" (UID: \"2595c30a-ace8-4674-8691-4353a9e55c46\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xpmr7" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.591964 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2478a25b-a8b7-44b5-8204-5862f15fb53d-config\") pod \"kube-apiserver-operator-766d6c64bb-bdr9q\" (UID: \"2478a25b-a8b7-44b5-8204-5862f15fb53d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bdr9q" Dec 05 01:12:49 crc 
kubenswrapper[4665]: I1205 01:12:49.592070 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2595c30a-ace8-4674-8691-4353a9e55c46-metrics-tls\") pod \"ingress-operator-5b745b69d9-xpmr7\" (UID: \"2595c30a-ace8-4674-8691-4353a9e55c46\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xpmr7" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.592916 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-lx8ts"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.604876 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.630089 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b62hk\" (UniqueName: \"kubernetes.io/projected/30a44c68-0827-46b3-ba7c-5aabf5cb34b1-kube-api-access-b62hk\") pod \"dns-operator-744455d44c-rgvgv\" (UID: \"30a44c68-0827-46b3-ba7c-5aabf5cb34b1\") " pod="openshift-dns-operator/dns-operator-744455d44c-rgvgv" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.641381 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b52fv\" (UniqueName: \"kubernetes.io/projected/d808dde0-4fdc-4d21-a6f2-3c27f540018f-kube-api-access-b52fv\") pod \"downloads-7954f5f757-hkzlk\" (UID: \"d808dde0-4fdc-4d21-a6f2-3c27f540018f\") " pod="openshift-console/downloads-7954f5f757-hkzlk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.678160 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86r58" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.611594 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.625118 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.648164 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.664571 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.683321 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.685700 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xbw2d"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.685871 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-lx8ts" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.686018 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-qgtfk"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.686031 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414940-rn5fx"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.686375 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414940-rn5fx" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.686570 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.688662 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-wdjtm"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.689259 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-wdjtm" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.697511 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6afac4e7-1280-46f1-9571-a8aaad37b32f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-7qh5v\" (UID: \"6afac4e7-1280-46f1-9571-a8aaad37b32f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7qh5v" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.697542 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/10397df3-a399-4278-8909-538e0b8c3c01-config\") pod \"openshift-apiserver-operator-796bbdcf4f-4wbqn\" (UID: \"10397df3-a399-4278-8909-538e0b8c3c01\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4wbqn" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.697571 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znkf5\" (UniqueName: \"kubernetes.io/projected/6afac4e7-1280-46f1-9571-a8aaad37b32f-kube-api-access-znkf5\") pod \"kube-storage-version-migrator-operator-b67b599dd-7qh5v\" (UID: \"6afac4e7-1280-46f1-9571-a8aaad37b32f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7qh5v" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.697594 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tqfg7\" (UID: \"9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580\") " pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.697611 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/fb9e84d7-85d4-4d27-b238-cb79eebe9cd6-certs\") pod \"machine-config-server-6swmr\" (UID: \"fb9e84d7-85d4-4d27-b238-cb79eebe9cd6\") " pod="openshift-machine-config-operator/machine-config-server-6swmr" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.697628 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7bbd42a4-d665-4950-89b1-ff6c53ac0b60-config\") pod \"kube-controller-manager-operator-78b949d7b-x8jz5\" (UID: \"7bbd42a4-d665-4950-89b1-ff6c53ac0b60\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x8jz5" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.697647 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftxx8\" 
(UniqueName: \"kubernetes.io/projected/4eb1d49b-1c1f-448c-888a-63d7966b8480-kube-api-access-ftxx8\") pod \"multus-admission-controller-857f4d67dd-sf489\" (UID: \"4eb1d49b-1c1f-448c-888a-63d7966b8480\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sf489" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.697665 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jq98p\" (UniqueName: \"kubernetes.io/projected/29ef4f10-a6b6-4551-8067-0a82efc5651d-kube-api-access-jq98p\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698004 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tqfg7\" (UID: \"9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580\") " pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698029 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/f20fec7f-c7bb-4bb5-b86c-076b8931aa97-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-k5gsl\" (UID: \"f20fec7f-c7bb-4bb5-b86c-076b8931aa97\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-k5gsl" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698058 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/90d5f8b3-804d-4f32-81b9-230301d5a834-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zq8h8\" (UID: \"90d5f8b3-804d-4f32-81b9-230301d5a834\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zq8h8" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698076 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/6c57b974-85bf-41ea-83aa-096fe4228a87-srv-cert\") pod \"catalog-operator-68c6474976-thm7j\" (UID: \"6c57b974-85bf-41ea-83aa-096fe4228a87\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-thm7j" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698093 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/29ef4f10-a6b6-4551-8067-0a82efc5651d-audit-policies\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698108 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698127 4665 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/90d5f8b3-804d-4f32-81b9-230301d5a834-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zq8h8\" (UID: \"90d5f8b3-804d-4f32-81b9-230301d5a834\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zq8h8" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698143 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbckp\" (UniqueName: \"kubernetes.io/projected/cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774-kube-api-access-rbckp\") pod \"olm-operator-6b444d44fb-9bxhg\" (UID: \"cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9bxhg" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698162 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/d82a4e9d-7232-4652-baec-7f0e395afd4b-tmpfs\") pod \"packageserver-d55dfcdfc-55kmh\" (UID: \"d82a4e9d-7232-4652-baec-7f0e395afd4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698206 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6ae00a40-d6ad-4d9d-9eca-8c09a9818801-metrics-tls\") pod \"dns-default-qgtfk\" (UID: \"6ae00a40-d6ad-4d9d-9eca-8c09a9818801\") " pod="openshift-dns/dns-default-qgtfk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698338 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkjbl\" (UniqueName: \"kubernetes.io/projected/55c71265-8dce-4127-ae57-1f0850d20a80-kube-api-access-mkjbl\") pod \"package-server-manager-789f6589d5-794gc\" (UID: \"55c71265-8dce-4127-ae57-1f0850d20a80\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-794gc" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698363 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2595c30a-ace8-4674-8691-4353a9e55c46-trusted-ca\") pod \"ingress-operator-5b745b69d9-xpmr7\" (UID: \"2595c30a-ace8-4674-8691-4353a9e55c46\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xpmr7" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698379 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2478a25b-a8b7-44b5-8204-5862f15fb53d-config\") pod \"kube-apiserver-operator-766d6c64bb-bdr9q\" (UID: \"2478a25b-a8b7-44b5-8204-5862f15fb53d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bdr9q" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698442 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698462 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: 
\"kubernetes.io/secret/55c71265-8dce-4127-ae57-1f0850d20a80-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-794gc\" (UID: \"55c71265-8dce-4127-ae57-1f0850d20a80\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-794gc" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698479 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/f8f58b77-587d-4631-acf8-eac2c8a3ce4f-mountpoint-dir\") pod \"csi-hostpathplugin-5jczg\" (UID: \"f8f58b77-587d-4631-acf8-eac2c8a3ce4f\") " pod="hostpath-provisioner/csi-hostpathplugin-5jczg" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698494 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-chsk5\" (UniqueName: \"kubernetes.io/projected/f8f58b77-587d-4631-acf8-eac2c8a3ce4f-kube-api-access-chsk5\") pod \"csi-hostpathplugin-5jczg\" (UID: \"f8f58b77-587d-4631-acf8-eac2c8a3ce4f\") " pod="hostpath-provisioner/csi-hostpathplugin-5jczg" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698513 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2478a25b-a8b7-44b5-8204-5862f15fb53d-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-bdr9q\" (UID: \"2478a25b-a8b7-44b5-8204-5862f15fb53d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bdr9q" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698531 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/6c57b974-85bf-41ea-83aa-096fe4228a87-profile-collector-cert\") pod \"catalog-operator-68c6474976-thm7j\" (UID: \"6c57b974-85bf-41ea-83aa-096fe4228a87\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-thm7j" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698549 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/f8f58b77-587d-4631-acf8-eac2c8a3ce4f-registration-dir\") pod \"csi-hostpathplugin-5jczg\" (UID: \"f8f58b77-587d-4631-acf8-eac2c8a3ce4f\") " pod="hostpath-provisioner/csi-hostpathplugin-5jczg" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698566 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e3424ab6-4a72-48f5-9fd2-72204d4e2a6b-cert\") pod \"ingress-canary-7lq4t\" (UID: \"e3424ab6-4a72-48f5-9fd2-72204d4e2a6b\") " pod="openshift-ingress-canary/ingress-canary-7lq4t" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698584 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/191296ac-80a8-4b64-acf8-d0087ac08c79-service-ca-bundle\") pod \"router-default-5444994796-7wl87\" (UID: \"191296ac-80a8-4b64-acf8-d0087ac08c79\") " pod="openshift-ingress/router-default-5444994796-7wl87" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698609 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90d5f8b3-804d-4f32-81b9-230301d5a834-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zq8h8\" (UID: \"90d5f8b3-804d-4f32-81b9-230301d5a834\") " 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zq8h8" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698636 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698766 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hkh4p\" (UniqueName: \"kubernetes.io/projected/fb9e84d7-85d4-4d27-b238-cb79eebe9cd6-kube-api-access-hkh4p\") pod \"machine-config-server-6swmr\" (UID: \"fb9e84d7-85d4-4d27-b238-cb79eebe9cd6\") " pod="openshift-machine-config-operator/machine-config-server-6swmr" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698784 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/191296ac-80a8-4b64-acf8-d0087ac08c79-metrics-certs\") pod \"router-default-5444994796-7wl87\" (UID: \"191296ac-80a8-4b64-acf8-d0087ac08c79\") " pod="openshift-ingress/router-default-5444994796-7wl87" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698869 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d82a4e9d-7232-4652-baec-7f0e395afd4b-webhook-cert\") pod \"packageserver-d55dfcdfc-55kmh\" (UID: \"d82a4e9d-7232-4652-baec-7f0e395afd4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698896 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2sr6g\" (UniqueName: \"kubernetes.io/projected/f20fec7f-c7bb-4bb5-b86c-076b8931aa97-kube-api-access-2sr6g\") pod \"control-plane-machine-set-operator-78cbb6b69f-k5gsl\" (UID: \"f20fec7f-c7bb-4bb5-b86c-076b8931aa97\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-k5gsl" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.698956 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2595c30a-ace8-4674-8691-4353a9e55c46-bound-sa-token\") pod \"ingress-operator-5b745b69d9-xpmr7\" (UID: \"2595c30a-ace8-4674-8691-4353a9e55c46\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xpmr7" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.699025 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.699045 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/f8f58b77-587d-4631-acf8-eac2c8a3ce4f-plugins-dir\") pod \"csi-hostpathplugin-5jczg\" (UID: \"f8f58b77-587d-4631-acf8-eac2c8a3ce4f\") " 
pod="hostpath-provisioner/csi-hostpathplugin-5jczg" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.699073 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7bbd42a4-d665-4950-89b1-ff6c53ac0b60-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-x8jz5\" (UID: \"7bbd42a4-d665-4950-89b1-ff6c53ac0b60\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x8jz5" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.699174 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6ae00a40-d6ad-4d9d-9eca-8c09a9818801-config-volume\") pod \"dns-default-qgtfk\" (UID: \"6ae00a40-d6ad-4d9d-9eca-8c09a9818801\") " pod="openshift-dns/dns-default-qgtfk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.699650 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/191296ac-80a8-4b64-acf8-d0087ac08c79-service-ca-bundle\") pod \"router-default-5444994796-7wl87\" (UID: \"191296ac-80a8-4b64-acf8-d0087ac08c79\") " pod="openshift-ingress/router-default-5444994796-7wl87" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.699747 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/191296ac-80a8-4b64-acf8-d0087ac08c79-stats-auth\") pod \"router-default-5444994796-7wl87\" (UID: \"191296ac-80a8-4b64-acf8-d0087ac08c79\") " pod="openshift-ingress/router-default-5444994796-7wl87" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.699810 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/10397df3-a399-4278-8909-538e0b8c3c01-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-4wbqn\" (UID: \"10397df3-a399-4278-8909-538e0b8c3c01\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4wbqn" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.699815 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/6ae00a40-d6ad-4d9d-9eca-8c09a9818801-config-volume\") pod \"dns-default-qgtfk\" (UID: \"6ae00a40-d6ad-4d9d-9eca-8c09a9818801\") " pod="openshift-dns/dns-default-qgtfk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.699844 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.699946 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4eb1d49b-1c1f-448c-888a-63d7966b8480-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-sf489\" (UID: \"4eb1d49b-1c1f-448c-888a-63d7966b8480\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sf489" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.699985 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" 
(UniqueName: \"kubernetes.io/projected/2478a25b-a8b7-44b5-8204-5862f15fb53d-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-bdr9q\" (UID: \"2478a25b-a8b7-44b5-8204-5862f15fb53d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bdr9q" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700098 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxthd\" (UniqueName: \"kubernetes.io/projected/6ae00a40-d6ad-4d9d-9eca-8c09a9818801-kube-api-access-zxthd\") pod \"dns-default-qgtfk\" (UID: \"6ae00a40-d6ad-4d9d-9eca-8c09a9818801\") " pod="openshift-dns/dns-default-qgtfk" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700129 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbn7g\" (UniqueName: \"kubernetes.io/projected/2595c30a-ace8-4674-8691-4353a9e55c46-kube-api-access-kbn7g\") pod \"ingress-operator-5b745b69d9-xpmr7\" (UID: \"2595c30a-ace8-4674-8691-4353a9e55c46\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xpmr7" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700149 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d82a4e9d-7232-4652-baec-7f0e395afd4b-apiservice-cert\") pod \"packageserver-d55dfcdfc-55kmh\" (UID: \"d82a4e9d-7232-4652-baec-7f0e395afd4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700167 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700184 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/759b38bd-4ae5-40d9-ada0-c27a4d86dde8-auth-proxy-config\") pod \"machine-config-operator-74547568cd-g5zmc\" (UID: \"759b38bd-4ae5-40d9-ada0-c27a4d86dde8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g5zmc" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700210 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dznf7\" (UniqueName: \"kubernetes.io/projected/7d763d3d-4925-4c67-8828-873c9a8dc973-kube-api-access-dznf7\") pod \"migrator-59844c95c7-snslb\" (UID: \"7d763d3d-4925-4c67-8828-873c9a8dc973\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-snslb" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700233 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bm7q\" (UniqueName: \"kubernetes.io/projected/9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580-kube-api-access-4bm7q\") pod \"marketplace-operator-79b997595-tqfg7\" (UID: \"9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580\") " pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700250 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h74x2\" (UniqueName: 
\"kubernetes.io/projected/e3424ab6-4a72-48f5-9fd2-72204d4e2a6b-kube-api-access-h74x2\") pod \"ingress-canary-7lq4t\" (UID: \"e3424ab6-4a72-48f5-9fd2-72204d4e2a6b\") " pod="openshift-ingress-canary/ingress-canary-7lq4t" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700272 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b9a9d28c-11ea-4fef-ac00-13eb549271ff-proxy-tls\") pod \"machine-config-controller-84d6567774-h4gnf\" (UID: \"b9a9d28c-11ea-4fef-ac00-13eb549271ff\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h4gnf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700287 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700328 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f45pc\" (UniqueName: \"kubernetes.io/projected/d82a4e9d-7232-4652-baec-7f0e395afd4b-kube-api-access-f45pc\") pod \"packageserver-d55dfcdfc-55kmh\" (UID: \"d82a4e9d-7232-4652-baec-7f0e395afd4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700344 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/f8f58b77-587d-4631-acf8-eac2c8a3ce4f-socket-dir\") pod \"csi-hostpathplugin-5jczg\" (UID: \"f8f58b77-587d-4631-acf8-eac2c8a3ce4f\") " pod="hostpath-provisioner/csi-hostpathplugin-5jczg" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700365 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6afac4e7-1280-46f1-9571-a8aaad37b32f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-7qh5v\" (UID: \"6afac4e7-1280-46f1-9571-a8aaad37b32f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7qh5v" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700381 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvt6l\" (UniqueName: \"kubernetes.io/projected/b9a9d28c-11ea-4fef-ac00-13eb549271ff-kube-api-access-rvt6l\") pod \"machine-config-controller-84d6567774-h4gnf\" (UID: \"b9a9d28c-11ea-4fef-ac00-13eb549271ff\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h4gnf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700396 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700412 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"proxy-tls\" (UniqueName: \"kubernetes.io/secret/759b38bd-4ae5-40d9-ada0-c27a4d86dde8-proxy-tls\") pod \"machine-config-operator-74547568cd-g5zmc\" (UID: \"759b38bd-4ae5-40d9-ada0-c27a4d86dde8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g5zmc" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700432 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2595c30a-ace8-4674-8691-4353a9e55c46-metrics-tls\") pod \"ingress-operator-5b745b69d9-xpmr7\" (UID: \"2595c30a-ace8-4674-8691-4353a9e55c46\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xpmr7" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700448 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700466 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774-srv-cert\") pod \"olm-operator-6b444d44fb-9bxhg\" (UID: \"cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9bxhg" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700482 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/191296ac-80a8-4b64-acf8-d0087ac08c79-default-certificate\") pod \"router-default-5444994796-7wl87\" (UID: \"191296ac-80a8-4b64-acf8-d0087ac08c79\") " pod="openshift-ingress/router-default-5444994796-7wl87" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700500 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/759b38bd-4ae5-40d9-ada0-c27a4d86dde8-images\") pod \"machine-config-operator-74547568cd-g5zmc\" (UID: \"759b38bd-4ae5-40d9-ada0-c27a4d86dde8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g5zmc" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700518 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8l8b\" (UniqueName: \"kubernetes.io/projected/10397df3-a399-4278-8909-538e0b8c3c01-kube-api-access-z8l8b\") pod \"openshift-apiserver-operator-796bbdcf4f-4wbqn\" (UID: \"10397df3-a399-4278-8909-538e0b8c3c01\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4wbqn" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700535 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/f8f58b77-587d-4631-acf8-eac2c8a3ce4f-csi-data-dir\") pod \"csi-hostpathplugin-5jczg\" (UID: \"f8f58b77-587d-4631-acf8-eac2c8a3ce4f\") " pod="hostpath-provisioner/csi-hostpathplugin-5jczg" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700553 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q9qh9\" (UniqueName: 
\"kubernetes.io/projected/759b38bd-4ae5-40d9-ada0-c27a4d86dde8-kube-api-access-q9qh9\") pod \"machine-config-operator-74547568cd-g5zmc\" (UID: \"759b38bd-4ae5-40d9-ada0-c27a4d86dde8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g5zmc" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700598 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c9sx8\" (UniqueName: \"kubernetes.io/projected/191296ac-80a8-4b64-acf8-d0087ac08c79-kube-api-access-c9sx8\") pod \"router-default-5444994796-7wl87\" (UID: \"191296ac-80a8-4b64-acf8-d0087ac08c79\") " pod="openshift-ingress/router-default-5444994796-7wl87" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700620 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700674 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7bbd42a4-d665-4950-89b1-ff6c53ac0b60-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-x8jz5\" (UID: \"7bbd42a4-d665-4950-89b1-ff6c53ac0b60\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x8jz5" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700700 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fnpfl\" (UniqueName: \"kubernetes.io/projected/6c57b974-85bf-41ea-83aa-096fe4228a87-kube-api-access-fnpfl\") pod \"catalog-operator-68c6474976-thm7j\" (UID: \"6c57b974-85bf-41ea-83aa-096fe4228a87\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-thm7j" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700720 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/29ef4f10-a6b6-4551-8067-0a82efc5651d-audit-dir\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700736 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b9a9d28c-11ea-4fef-ac00-13eb549271ff-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-h4gnf\" (UID: \"b9a9d28c-11ea-4fef-ac00-13eb549271ff\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h4gnf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700752 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774-profile-collector-cert\") pod \"olm-operator-6b444d44fb-9bxhg\" (UID: \"cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9bxhg" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700777 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.700799 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/fb9e84d7-85d4-4d27-b238-cb79eebe9cd6-node-bootstrap-token\") pod \"machine-config-server-6swmr\" (UID: \"fb9e84d7-85d4-4d27-b238-cb79eebe9cd6\") " pod="openshift-machine-config-operator/machine-config-server-6swmr" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.703950 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-thm7j"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.710700 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/191296ac-80a8-4b64-acf8-d0087ac08c79-default-certificate\") pod \"router-default-5444994796-7wl87\" (UID: \"191296ac-80a8-4b64-acf8-d0087ac08c79\") " pod="openshift-ingress/router-default-5444994796-7wl87" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.710738 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-sf489"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.704888 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.711688 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-5jczg"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.718346 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/191296ac-80a8-4b64-acf8-d0087ac08c79-stats-auth\") pod \"router-default-5444994796-7wl87\" (UID: \"191296ac-80a8-4b64-acf8-d0087ac08c79\") " pod="openshift-ingress/router-default-5444994796-7wl87" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.718157 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9bxhg"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.723512 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.723836 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-7lq4t"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.728732 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tqfg7"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.731421 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-k5gsl"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.750775 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-g5zmc"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.752895 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-snslb"] 
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.756649 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.758546 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.760167 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-794gc"]
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.764904 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-h4gnf"]
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.767231 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.770432 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4wbqn"]
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.771135 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/191296ac-80a8-4b64-acf8-d0087ac08c79-metrics-certs\") pod \"router-default-5444994796-7wl87\" (UID: \"191296ac-80a8-4b64-acf8-d0087ac08c79\") " pod="openshift-ingress/router-default-5444994796-7wl87"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.777897 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414940-rn5fx"]
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.778934 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/6ae00a40-d6ad-4d9d-9eca-8c09a9818801-metrics-tls\") pod \"dns-default-qgtfk\" (UID: \"6ae00a40-d6ad-4d9d-9eca-8c09a9818801\") " pod="openshift-dns/dns-default-qgtfk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.786048 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.791778 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh"]
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.804090 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-6bhrf"]
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.807030 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.809252 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.816044 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-wdjtm"]
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.816191 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ccf81f6-7188-4348-b61e-cb5f347e888a-config\") pod \"service-ca-operator-777779d784-lx8ts\" (UID: \"1ccf81f6-7188-4348-b61e-cb5f347e888a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lx8ts"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.816259 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.816337 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4eb1d49b-1c1f-448c-888a-63d7966b8480-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-sf489\" (UID: \"4eb1d49b-1c1f-448c-888a-63d7966b8480\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sf489"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.816381 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4ebede5a-b1cb-4059-b158-ce9a85aed080-secret-volume\") pod \"collect-profiles-29414940-rn5fx\" (UID: \"4ebede5a-b1cb-4059-b158-ce9a85aed080\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414940-rn5fx"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.816462 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ccf81f6-7188-4348-b61e-cb5f347e888a-serving-cert\") pod \"service-ca-operator-777779d784-lx8ts\" (UID: \"1ccf81f6-7188-4348-b61e-cb5f347e888a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lx8ts"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.816592 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d82a4e9d-7232-4652-baec-7f0e395afd4b-apiservice-cert\") pod \"packageserver-d55dfcdfc-55kmh\" (UID: \"d82a4e9d-7232-4652-baec-7f0e395afd4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.816694 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.816853 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/759b38bd-4ae5-40d9-ada0-c27a4d86dde8-auth-proxy-config\") pod \"machine-config-operator-74547568cd-g5zmc\" (UID: \"759b38bd-4ae5-40d9-ada0-c27a4d86dde8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g5zmc"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.817026 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bm7q\" (UniqueName: \"kubernetes.io/projected/9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580-kube-api-access-4bm7q\") pod \"marketplace-operator-79b997595-tqfg7\" (UID: \"9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580\") " pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.817083 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h74x2\" (UniqueName: \"kubernetes.io/projected/e3424ab6-4a72-48f5-9fd2-72204d4e2a6b-kube-api-access-h74x2\") pod \"ingress-canary-7lq4t\" (UID: \"e3424ab6-4a72-48f5-9fd2-72204d4e2a6b\") " pod="openshift-ingress-canary/ingress-canary-7lq4t"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.817184 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b9a9d28c-11ea-4fef-ac00-13eb549271ff-proxy-tls\") pod \"machine-config-controller-84d6567774-h4gnf\" (UID: \"b9a9d28c-11ea-4fef-ac00-13eb549271ff\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h4gnf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.817280 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.817370 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f45pc\" (UniqueName: \"kubernetes.io/projected/d82a4e9d-7232-4652-baec-7f0e395afd4b-kube-api-access-f45pc\") pod \"packageserver-d55dfcdfc-55kmh\" (UID: \"d82a4e9d-7232-4652-baec-7f0e395afd4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.817435 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/f8f58b77-587d-4631-acf8-eac2c8a3ce4f-socket-dir\") pod \"csi-hostpathplugin-5jczg\" (UID: \"f8f58b77-587d-4631-acf8-eac2c8a3ce4f\") " pod="hostpath-provisioner/csi-hostpathplugin-5jczg"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.817880 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/f8f58b77-587d-4631-acf8-eac2c8a3ce4f-socket-dir\") pod \"csi-hostpathplugin-5jczg\" (UID: \"f8f58b77-587d-4631-acf8-eac2c8a3ce4f\") " pod="hostpath-provisioner/csi-hostpathplugin-5jczg"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.818625 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvt6l\" (UniqueName: \"kubernetes.io/projected/b9a9d28c-11ea-4fef-ac00-13eb549271ff-kube-api-access-rvt6l\") pod \"machine-config-controller-84d6567774-h4gnf\" (UID: \"b9a9d28c-11ea-4fef-ac00-13eb549271ff\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h4gnf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.818702 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.818726 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/759b38bd-4ae5-40d9-ada0-c27a4d86dde8-proxy-tls\") pod \"machine-config-operator-74547568cd-g5zmc\" (UID: \"759b38bd-4ae5-40d9-ada0-c27a4d86dde8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g5zmc"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.818817 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4ebede5a-b1cb-4059-b158-ce9a85aed080-config-volume\") pod \"collect-profiles-29414940-rn5fx\" (UID: \"4ebede5a-b1cb-4059-b158-ce9a85aed080\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414940-rn5fx"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.819607 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.819681 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774-srv-cert\") pod \"olm-operator-6b444d44fb-9bxhg\" (UID: \"cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9bxhg"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.819757 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bl9r"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.820439 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/759b38bd-4ae5-40d9-ada0-c27a4d86dde8-auth-proxy-config\") pod \"machine-config-operator-74547568cd-g5zmc\" (UID: \"759b38bd-4ae5-40d9-ada0-c27a4d86dde8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g5zmc"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.819759 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/945b1001-04d4-4312-ab2c-39bddf4851f2-signing-key\") pod \"service-ca-9c57cc56f-wdjtm\" (UID: \"945b1001-04d4-4312-ab2c-39bddf4851f2\") " pod="openshift-service-ca/service-ca-9c57cc56f-wdjtm"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.821266 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/f8f58b77-587d-4631-acf8-eac2c8a3ce4f-csi-data-dir\") pod \"csi-hostpathplugin-5jczg\" (UID: \"f8f58b77-587d-4631-acf8-eac2c8a3ce4f\") " pod="hostpath-provisioner/csi-hostpathplugin-5jczg"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.821307 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/759b38bd-4ae5-40d9-ada0-c27a4d86dde8-images\") pod \"machine-config-operator-74547568cd-g5zmc\" (UID: \"759b38bd-4ae5-40d9-ada0-c27a4d86dde8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g5zmc"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.821394 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.821445 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q9qh9\" (UniqueName: \"kubernetes.io/projected/759b38bd-4ae5-40d9-ada0-c27a4d86dde8-kube-api-access-q9qh9\") pod \"machine-config-operator-74547568cd-g5zmc\" (UID: \"759b38bd-4ae5-40d9-ada0-c27a4d86dde8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g5zmc"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.821548 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/29ef4f10-a6b6-4551-8067-0a82efc5651d-audit-dir\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.821602 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b9a9d28c-11ea-4fef-ac00-13eb549271ff-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-h4gnf\" (UID: \"b9a9d28c-11ea-4fef-ac00-13eb549271ff\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h4gnf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.821667 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774-profile-collector-cert\") pod \"olm-operator-6b444d44fb-9bxhg\" (UID: \"cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9bxhg"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.821786 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.821809 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/fb9e84d7-85d4-4d27-b238-cb79eebe9cd6-node-bootstrap-token\") pod \"machine-config-server-6swmr\" (UID: \"fb9e84d7-85d4-4d27-b238-cb79eebe9cd6\") " pod="openshift-machine-config-operator/machine-config-server-6swmr"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.821840 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/f8f58b77-587d-4631-acf8-eac2c8a3ce4f-csi-data-dir\") pod \"csi-hostpathplugin-5jczg\" (UID: \"f8f58b77-587d-4631-acf8-eac2c8a3ce4f\") " pod="hostpath-provisioner/csi-hostpathplugin-5jczg"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.821898 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tqfg7\" (UID: \"9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580\") " pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.821944 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/fb9e84d7-85d4-4d27-b238-cb79eebe9cd6-certs\") pod \"machine-config-server-6swmr\" (UID: \"fb9e84d7-85d4-4d27-b238-cb79eebe9cd6\") " pod="openshift-machine-config-operator/machine-config-server-6swmr"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.822249 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/29ef4f10-a6b6-4551-8067-0a82efc5651d-audit-dir\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.821996 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b2csh\" (UniqueName: \"kubernetes.io/projected/1ccf81f6-7188-4348-b61e-cb5f347e888a-kube-api-access-b2csh\") pod \"service-ca-operator-777779d784-lx8ts\" (UID: \"1ccf81f6-7188-4348-b61e-cb5f347e888a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lx8ts"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.822882 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftxx8\" (UniqueName: \"kubernetes.io/projected/4eb1d49b-1c1f-448c-888a-63d7966b8480-kube-api-access-ftxx8\") pod \"multus-admission-controller-857f4d67dd-sf489\" (UID: \"4eb1d49b-1c1f-448c-888a-63d7966b8480\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sf489"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.822908 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jq98p\" (UniqueName: \"kubernetes.io/projected/29ef4f10-a6b6-4551-8067-0a82efc5651d-kube-api-access-jq98p\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.822980 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tqfg7\" (UID: \"9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580\") " pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.823011 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/f20fec7f-c7bb-4bb5-b86c-076b8931aa97-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-k5gsl\" (UID: \"f20fec7f-c7bb-4bb5-b86c-076b8931aa97\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-k5gsl"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.823062 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/29ef4f10-a6b6-4551-8067-0a82efc5651d-audit-policies\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.823448 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/b9a9d28c-11ea-4fef-ac00-13eb549271ff-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-h4gnf\" (UID: \"b9a9d28c-11ea-4fef-ac00-13eb549271ff\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h4gnf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.823495 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-lx8ts"]
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.823584 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.823675 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbckp\" (UniqueName: \"kubernetes.io/projected/cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774-kube-api-access-rbckp\") pod \"olm-operator-6b444d44fb-9bxhg\" (UID: \"cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9bxhg"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.823738 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/d82a4e9d-7232-4652-baec-7f0e395afd4b-tmpfs\") pod \"packageserver-d55dfcdfc-55kmh\" (UID: \"d82a4e9d-7232-4652-baec-7f0e395afd4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.823770 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkjbl\" (UniqueName: \"kubernetes.io/projected/55c71265-8dce-4127-ae57-1f0850d20a80-kube-api-access-mkjbl\") pod \"package-server-manager-789f6589d5-794gc\" (UID: \"55c71265-8dce-4127-ae57-1f0850d20a80\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-794gc"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.823851 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.823881 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/55c71265-8dce-4127-ae57-1f0850d20a80-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-794gc\" (UID: \"55c71265-8dce-4127-ae57-1f0850d20a80\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-794gc"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.823906 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/f8f58b77-587d-4631-acf8-eac2c8a3ce4f-mountpoint-dir\") pod \"csi-hostpathplugin-5jczg\" (UID: \"f8f58b77-587d-4631-acf8-eac2c8a3ce4f\") " pod="hostpath-provisioner/csi-hostpathplugin-5jczg"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.823948 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-chsk5\" (UniqueName: \"kubernetes.io/projected/f8f58b77-587d-4631-acf8-eac2c8a3ce4f-kube-api-access-chsk5\") pod \"csi-hostpathplugin-5jczg\" (UID: \"f8f58b77-587d-4631-acf8-eac2c8a3ce4f\") " pod="hostpath-provisioner/csi-hostpathplugin-5jczg"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.823977 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/945b1001-04d4-4312-ab2c-39bddf4851f2-signing-cabundle\") pod \"service-ca-9c57cc56f-wdjtm\" (UID: \"945b1001-04d4-4312-ab2c-39bddf4851f2\") " pod="openshift-service-ca/service-ca-9c57cc56f-wdjtm"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.824014 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/f8f58b77-587d-4631-acf8-eac2c8a3ce4f-registration-dir\") pod \"csi-hostpathplugin-5jczg\" (UID: \"f8f58b77-587d-4631-acf8-eac2c8a3ce4f\") " pod="hostpath-provisioner/csi-hostpathplugin-5jczg"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.824074 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e3424ab6-4a72-48f5-9fd2-72204d4e2a6b-cert\") pod \"ingress-canary-7lq4t\" (UID: \"e3424ab6-4a72-48f5-9fd2-72204d4e2a6b\") " pod="openshift-ingress-canary/ingress-canary-7lq4t"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.824193 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.824322 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/d82a4e9d-7232-4652-baec-7f0e395afd4b-tmpfs\") pod \"packageserver-d55dfcdfc-55kmh\" (UID: \"d82a4e9d-7232-4652-baec-7f0e395afd4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.828130 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/f8f58b77-587d-4631-acf8-eac2c8a3ce4f-registration-dir\") pod \"csi-hostpathplugin-5jczg\" (UID: \"f8f58b77-587d-4631-acf8-eac2c8a3ce4f\") " pod="hostpath-provisioner/csi-hostpathplugin-5jczg"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.829714 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/f8f58b77-587d-4631-acf8-eac2c8a3ce4f-mountpoint-dir\") pod \"csi-hostpathplugin-5jczg\" (UID: \"f8f58b77-587d-4631-acf8-eac2c8a3ce4f\") " pod="hostpath-provisioner/csi-hostpathplugin-5jczg"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.829966 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hkh4p\" (UniqueName: \"kubernetes.io/projected/fb9e84d7-85d4-4d27-b238-cb79eebe9cd6-kube-api-access-hkh4p\") pod \"machine-config-server-6swmr\" (UID: \"fb9e84d7-85d4-4d27-b238-cb79eebe9cd6\") " pod="openshift-machine-config-operator/machine-config-server-6swmr"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.830023 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fdf6b\" (UniqueName: \"kubernetes.io/projected/945b1001-04d4-4312-ab2c-39bddf4851f2-kube-api-access-fdf6b\") pod \"service-ca-9c57cc56f-wdjtm\" (UID: \"945b1001-04d4-4312-ab2c-39bddf4851f2\") " pod="openshift-service-ca/service-ca-9c57cc56f-wdjtm"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.830064 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d82a4e9d-7232-4652-baec-7f0e395afd4b-webhook-cert\") pod \"packageserver-d55dfcdfc-55kmh\" (UID: \"d82a4e9d-7232-4652-baec-7f0e395afd4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.830099 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2sr6g\" (UniqueName: \"kubernetes.io/projected/f20fec7f-c7bb-4bb5-b86c-076b8931aa97-kube-api-access-2sr6g\") pod \"control-plane-machine-set-operator-78cbb6b69f-k5gsl\" (UID: \"f20fec7f-c7bb-4bb5-b86c-076b8931aa97\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-k5gsl"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.830128 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pmfl\" (UniqueName: \"kubernetes.io/projected/4ebede5a-b1cb-4059-b158-ce9a85aed080-kube-api-access-4pmfl\") pod \"collect-profiles-29414940-rn5fx\" (UID: \"4ebede5a-b1cb-4059-b158-ce9a85aed080\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414940-rn5fx"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.830159 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.830199 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/f8f58b77-587d-4631-acf8-eac2c8a3ce4f-plugins-dir\") pod \"csi-hostpathplugin-5jczg\" (UID: \"f8f58b77-587d-4631-acf8-eac2c8a3ce4f\") " pod="hostpath-provisioner/csi-hostpathplugin-5jczg"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.830392 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/f8f58b77-587d-4631-acf8-eac2c8a3ce4f-plugins-dir\") pod \"csi-hostpathplugin-5jczg\" (UID: \"f8f58b77-587d-4631-acf8-eac2c8a3ce4f\") " pod="hostpath-provisioner/csi-hostpathplugin-5jczg"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.830994 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.833141 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xbw2d"]
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.839745 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-hkzlk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.842618 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.861482 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-rgvgv"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.863875 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.871041 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2478a25b-a8b7-44b5-8204-5862f15fb53d-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-bdr9q\" (UID: \"2478a25b-a8b7-44b5-8204-5862f15fb53d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bdr9q"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.871922 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2478a25b-a8b7-44b5-8204-5862f15fb53d-config\") pod \"kube-apiserver-operator-766d6c64bb-bdr9q\" (UID: \"2478a25b-a8b7-44b5-8204-5862f15fb53d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bdr9q"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.887395 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.905891 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.910982 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.924099 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/2595c30a-ace8-4674-8691-4353a9e55c46-metrics-tls\") pod \"ingress-operator-5b745b69d9-xpmr7\" (UID: \"2595c30a-ace8-4674-8691-4353a9e55c46\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xpmr7"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.929510 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.931100 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2595c30a-ace8-4674-8691-4353a9e55c46-trusted-ca\") pod \"ingress-operator-5b745b69d9-xpmr7\" (UID: \"2595c30a-ace8-4674-8691-4353a9e55c46\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xpmr7"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.931871 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/945b1001-04d4-4312-ab2c-39bddf4851f2-signing-key\") pod \"service-ca-9c57cc56f-wdjtm\" (UID: \"945b1001-04d4-4312-ab2c-39bddf4851f2\") " pod="openshift-service-ca/service-ca-9c57cc56f-wdjtm"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.932104 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b2csh\" (UniqueName: \"kubernetes.io/projected/1ccf81f6-7188-4348-b61e-cb5f347e888a-kube-api-access-b2csh\") pod \"service-ca-operator-777779d784-lx8ts\" (UID: \"1ccf81f6-7188-4348-b61e-cb5f347e888a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lx8ts"
Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.932287 4665
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/945b1001-04d4-4312-ab2c-39bddf4851f2-signing-cabundle\") pod \"service-ca-9c57cc56f-wdjtm\" (UID: \"945b1001-04d4-4312-ab2c-39bddf4851f2\") " pod="openshift-service-ca/service-ca-9c57cc56f-wdjtm" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.932404 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fdf6b\" (UniqueName: \"kubernetes.io/projected/945b1001-04d4-4312-ab2c-39bddf4851f2-kube-api-access-fdf6b\") pod \"service-ca-9c57cc56f-wdjtm\" (UID: \"945b1001-04d4-4312-ab2c-39bddf4851f2\") " pod="openshift-service-ca/service-ca-9c57cc56f-wdjtm" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.932459 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pmfl\" (UniqueName: \"kubernetes.io/projected/4ebede5a-b1cb-4059-b158-ce9a85aed080-kube-api-access-4pmfl\") pod \"collect-profiles-29414940-rn5fx\" (UID: \"4ebede5a-b1cb-4059-b158-ce9a85aed080\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414940-rn5fx" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.932502 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ccf81f6-7188-4348-b61e-cb5f347e888a-config\") pod \"service-ca-operator-777779d784-lx8ts\" (UID: \"1ccf81f6-7188-4348-b61e-cb5f347e888a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lx8ts" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.932560 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4ebede5a-b1cb-4059-b158-ce9a85aed080-secret-volume\") pod \"collect-profiles-29414940-rn5fx\" (UID: \"4ebede5a-b1cb-4059-b158-ce9a85aed080\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414940-rn5fx" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.932605 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ccf81f6-7188-4348-b61e-cb5f347e888a-serving-cert\") pod \"service-ca-operator-777779d784-lx8ts\" (UID: \"1ccf81f6-7188-4348-b61e-cb5f347e888a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lx8ts" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.932751 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4ebede5a-b1cb-4059-b158-ce9a85aed080-config-volume\") pod \"collect-profiles-29414940-rn5fx\" (UID: \"4ebede5a-b1cb-4059-b158-ce9a85aed080\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414940-rn5fx" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.944441 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.954314 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-fqjcd"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.959712 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7twnf"] Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.962679 4665 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 05 01:12:49 crc kubenswrapper[4665]: I1205 01:12:49.982618 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 05 01:12:49 crc kubenswrapper[4665]: W1205 01:12:49.995141 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2a2c26ae_4f48_465f_8808_6bb1a443bd89.slice/crio-2282f003cfe4898cd91aa6b7abb559af6af0b83dcd319c6a6a3b30d110eff5e0 WatchSource:0}: Error finding container 2282f003cfe4898cd91aa6b7abb559af6af0b83dcd319c6a6a3b30d110eff5e0: Status 404 returned error can't find the container with id 2282f003cfe4898cd91aa6b7abb559af6af0b83dcd319c6a6a3b30d110eff5e0 Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.002694 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.023673 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.026157 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/90d5f8b3-804d-4f32-81b9-230301d5a834-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zq8h8\" (UID: \"90d5f8b3-804d-4f32-81b9-230301d5a834\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zq8h8" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.030961 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/90d5f8b3-804d-4f32-81b9-230301d5a834-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zq8h8\" (UID: \"90d5f8b3-804d-4f32-81b9-230301d5a834\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zq8h8" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.051655 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.058289 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/10397df3-a399-4278-8909-538e0b8c3c01-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-4wbqn\" (UID: \"10397df3-a399-4278-8909-538e0b8c3c01\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4wbqn" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.073362 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-vl66w"] Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.078913 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.088229 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.088813 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/10397df3-a399-4278-8909-538e0b8c3c01-config\") pod \"openshift-apiserver-operator-796bbdcf4f-4wbqn\" (UID: \"10397df3-a399-4278-8909-538e0b8c3c01\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4wbqn" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.091650 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-njkt2"] Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.103319 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.124775 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.143839 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.166240 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.182969 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.201734 4665 projected.go:288] Couldn't get configMap openshift-console-operator/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.201795 4665 projected.go:194] Error preparing data for projected volume kube-api-access-twd8d for pod openshift-console-operator/console-operator-58897d9998-4m9lk: failed to sync configmap cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.201881 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9c541bcd-d60a-4e73-8bcd-2d502eebbb9a-kube-api-access-twd8d podName:9c541bcd-d60a-4e73-8bcd-2d502eebbb9a nodeName:}" failed. No retries permitted until 2025-12-05 01:12:50.701857597 +0000 UTC m=+146.041249896 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-twd8d" (UniqueName: "kubernetes.io/projected/9c541bcd-d60a-4e73-8bcd-2d502eebbb9a-kube-api-access-twd8d") pod "console-operator-58897d9998-4m9lk" (UID: "9c541bcd-d60a-4e73-8bcd-2d502eebbb9a") : failed to sync configmap cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.203762 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.229017 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.243620 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.259164 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6afac4e7-1280-46f1-9571-a8aaad37b32f-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-7qh5v\" (UID: \"6afac4e7-1280-46f1-9571-a8aaad37b32f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7qh5v" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.263234 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.270203 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6afac4e7-1280-46f1-9571-a8aaad37b32f-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-7qh5v\" (UID: \"6afac4e7-1280-46f1-9571-a8aaad37b32f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7qh5v" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.284727 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.304668 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-q7hzk"] Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.304885 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.312465 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-sjmw5"] Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.319187 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-42qzk"] Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.324827 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.329594 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-xv889"] Dec 05 01:12:50 crc kubenswrapper[4665]: W1205 01:12:50.333196 4665 manager.go:1169] Failed to process watch event 
{EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod044a062b_0f88_42e6_9d3a_7a74dfa51ca5.slice/crio-7e3bbb0076fb72b50133ef5d1c54f0c18997395e0e3f3960d0f3d2f7f6ac11c4 WatchSource:0}: Error finding container 7e3bbb0076fb72b50133ef5d1c54f0c18997395e0e3f3960d0f3d2f7f6ac11c4: Status 404 returned error can't find the container with id 7e3bbb0076fb72b50133ef5d1c54f0c18997395e0e3f3960d0f3d2f7f6ac11c4 Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.344528 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 05 01:12:50 crc kubenswrapper[4665]: W1205 01:12:50.353066 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcfa776fb_8b8d_4cd1_941a_57e2672afdeb.slice/crio-62aaf5d94e515b56c1bfe3a00d3a563e52fd5f08c0ecd008d9111504db3b90ea WatchSource:0}: Error finding container 62aaf5d94e515b56c1bfe3a00d3a563e52fd5f08c0ecd008d9111504db3b90ea: Status 404 returned error can't find the container with id 62aaf5d94e515b56c1bfe3a00d3a563e52fd5f08c0ecd008d9111504db3b90ea Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.356892 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7bbd42a4-d665-4950-89b1-ff6c53ac0b60-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-x8jz5\" (UID: \"7bbd42a4-d665-4950-89b1-ff6c53ac0b60\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x8jz5" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.364931 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.368969 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7bbd42a4-d665-4950-89b1-ff6c53ac0b60-config\") pod \"kube-controller-manager-operator-78b949d7b-x8jz5\" (UID: \"7bbd42a4-d665-4950-89b1-ff6c53ac0b60\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x8jz5" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.387319 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.389140 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.403901 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.423562 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.432867 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/759b38bd-4ae5-40d9-ada0-c27a4d86dde8-images\") pod 
\"machine-config-operator-74547568cd-g5zmc\" (UID: \"759b38bd-4ae5-40d9-ada0-c27a4d86dde8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g5zmc" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.450974 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.461910 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.467824 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.468050 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.486839 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.497316 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv"] Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.500509 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bl9r"] Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.507925 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.511915 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.515828 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-45lxk"] Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.524690 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.537329 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-rgvgv"] Dec 05 01:12:50 crc kubenswrapper[4665]: 
W1205 01:12:50.537995 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod34ce612a_9777_4777_b6af_e98a53b3fb57.slice/crio-6d2e4d4cdb6448c5da780f8f4c9822c7ea47d4df3f0c489fbc5ca5f1fef2a695 WatchSource:0}: Error finding container 6d2e4d4cdb6448c5da780f8f4c9822c7ea47d4df3f0c489fbc5ca5f1fef2a695: Status 404 returned error can't find the container with id 6d2e4d4cdb6448c5da780f8f4c9822c7ea47d4df3f0c489fbc5ca5f1fef2a695 Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.538120 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.544666 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.552184 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.562753 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.562856 4665 request.go:700] Waited for 1.011130611s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/secrets?fieldSelector=metadata.name%3Dv4-0-config-system-serving-cert&limit=500&resourceVersion=0 Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.564809 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.578656 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.582510 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.624960 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.626305 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 
05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.628289 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.640080 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.642658 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.663964 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.664703 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-hkzlk"] Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.674416 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm"] Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.690029 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.694267 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/29ef4f10-a6b6-4551-8067-0a82efc5651d-audit-policies\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.699455 4665 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/catalog-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.699564 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6c57b974-85bf-41ea-83aa-096fe4228a87-srv-cert podName:6c57b974-85bf-41ea-83aa-096fe4228a87 nodeName:}" failed. No retries permitted until 2025-12-05 01:12:51.199537116 +0000 UTC m=+146.538929495 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "srv-cert" (UniqueName: "kubernetes.io/secret/6c57b974-85bf-41ea-83aa-096fe4228a87-srv-cert") pod "catalog-operator-68c6474976-thm7j" (UID: "6c57b974-85bf-41ea-83aa-096fe4228a87") : failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.699751 4665 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/pprof-cert: failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.699806 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6c57b974-85bf-41ea-83aa-096fe4228a87-profile-collector-cert podName:6c57b974-85bf-41ea-83aa-096fe4228a87 nodeName:}" failed. No retries permitted until 2025-12-05 01:12:51.199786922 +0000 UTC m=+146.539179311 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "profile-collector-cert" (UniqueName: "kubernetes.io/secret/6c57b974-85bf-41ea-83aa-096fe4228a87-profile-collector-cert") pod "catalog-operator-68c6474976-thm7j" (UID: "6c57b974-85bf-41ea-83aa-096fe4228a87") : failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.708051 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.712712 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.722518 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.731514 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-njkt2" event={"ID":"fc878503-97d1-4f69-a607-1ccae9ca303a","Type":"ContainerStarted","Data":"1489456e7f10b50c0a10577380c841632dbbc869755afe9417dc810bbf68a604"} Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.737768 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86r58" event={"ID":"648c3d09-edc5-43d1-acad-e3a36aa4c0d4","Type":"ContainerStarted","Data":"27d4ce73bab9a4dc5f90b7322eb889ec8348e2b20a91a718e9aee42e034eb2bb"} Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.737813 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86r58" event={"ID":"648c3d09-edc5-43d1-acad-e3a36aa4c0d4","Type":"ContainerStarted","Data":"b620cdf7cefbea08eedc19320d2e530d5566cb81476515d26a4c0dc64865c1ac"} Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.742120 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.746952 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-42qzk" event={"ID":"cfa776fb-8b8d-4cd1-941a-57e2672afdeb","Type":"ContainerStarted","Data":"62aaf5d94e515b56c1bfe3a00d3a563e52fd5f08c0ecd008d9111504db3b90ea"} Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.749048 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-q7hzk" event={"ID":"044a062b-0f88-42e6-9d3a-7a74dfa51ca5","Type":"ContainerStarted","Data":"7e3bbb0076fb72b50133ef5d1c54f0c18997395e0e3f3960d0f3d2f7f6ac11c4"} Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.753632 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-xv889" event={"ID":"b88c79b6-4760-4509-bee0-06de439c6ac2","Type":"ContainerStarted","Data":"0c3ecb35060192752f35175a79fbd592e703e34a0b13e3e97baeb4d28e049cb1"} Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.753681 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-xv889" 
event={"ID":"b88c79b6-4760-4509-bee0-06de439c6ac2","Type":"ContainerStarted","Data":"c6884fd73e5ece5257883795af30ced87d03432db11f8ab572a25115635ee678"} Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.755981 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twd8d\" (UniqueName: \"kubernetes.io/projected/9c541bcd-d60a-4e73-8bcd-2d502eebbb9a-kube-api-access-twd8d\") pod \"console-operator-58897d9998-4m9lk\" (UID: \"9c541bcd-d60a-4e73-8bcd-2d502eebbb9a\") " pod="openshift-console-operator/console-operator-58897d9998-4m9lk" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.756357 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv" event={"ID":"34ce612a-9777-4777-b6af-e98a53b3fb57","Type":"ContainerStarted","Data":"6d2e4d4cdb6448c5da780f8f4c9822c7ea47d4df3f0c489fbc5ca5f1fef2a695"} Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.758495 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-hkzlk" event={"ID":"d808dde0-4fdc-4d21-a6f2-3c27f540018f","Type":"ContainerStarted","Data":"b8c8d3c7655e33b2c20a287931f077f26d7b8cde088fe5a7fba119fbe9943f8c"} Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.763224 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.767580 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4ebede5a-b1cb-4059-b158-ce9a85aed080-secret-volume\") pod \"collect-profiles-29414940-rn5fx\" (UID: \"4ebede5a-b1cb-4059-b158-ce9a85aed080\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414940-rn5fx" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.776567 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774-profile-collector-cert\") pod \"olm-operator-6b444d44fb-9bxhg\" (UID: \"cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9bxhg" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.782951 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.790013 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf" event={"ID":"f9f525e3-8aea-4e56-99ca-6a06cdafa897","Type":"ContainerStarted","Data":"9711538003c40c9164d7b34b77dd2143a471c51a65c9892f9dbef80695f02b64"} Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.790047 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf" event={"ID":"f9f525e3-8aea-4e56-99ca-6a06cdafa897","Type":"ContainerStarted","Data":"af8c95e690d6fd0ee5d5c1529890d1f87386681387c783656b61b97c25c6658c"} Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.792702 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.796100 4665 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-7twnf container/controller-manager 
namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.796133 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf" podUID="f9f525e3-8aea-4e56-99ca-6a06cdafa897" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.798043 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-sjmw5" event={"ID":"1d59a021-618b-40a7-b2b6-4013c084d74e","Type":"ContainerStarted","Data":"da38ac7e6b979187902c7ba1fcbf906efe12b22d8403a54919ca75224416dd6f"} Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.798092 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-sjmw5" event={"ID":"1d59a021-618b-40a7-b2b6-4013c084d74e","Type":"ContainerStarted","Data":"d8085f28b8362174fbbb1da944dda5a829f97d57a4fcbfa7d2adc39a2cf21842"} Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.802661 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.812227 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-45lxk" event={"ID":"5e25d774-afc2-4e37-9121-79f761e9e8d9","Type":"ContainerStarted","Data":"b529d79f378ff389ee70c37e801435bd31892abd21b9266a23ed5d6aa841537f"} Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.816970 4665 secret.go:188] Couldn't get secret openshift-multus/multus-admission-controller-secret: failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.817052 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/4eb1d49b-1c1f-448c-888a-63d7966b8480-webhook-certs podName:4eb1d49b-1c1f-448c-888a-63d7966b8480 nodeName:}" failed. No retries permitted until 2025-12-05 01:12:51.317032576 +0000 UTC m=+146.656424875 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/4eb1d49b-1c1f-448c-888a-63d7966b8480-webhook-certs") pod "multus-admission-controller-857f4d67dd-sf489" (UID: "4eb1d49b-1c1f-448c-888a-63d7966b8480") : failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.817075 4665 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/packageserver-service-cert: failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.817096 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d82a4e9d-7232-4652-baec-7f0e395afd4b-apiservice-cert podName:d82a4e9d-7232-4652-baec-7f0e395afd4b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:51.317090267 +0000 UTC m=+146.656482566 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "apiservice-cert" (UniqueName: "kubernetes.io/secret/d82a4e9d-7232-4652-baec-7f0e395afd4b-apiservice-cert") pod "packageserver-d55dfcdfc-55kmh" (UID: "d82a4e9d-7232-4652-baec-7f0e395afd4b") : failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.819288 4665 secret.go:188] Couldn't get secret openshift-machine-config-operator/mcc-proxy-tls: failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.819401 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b9a9d28c-11ea-4fef-ac00-13eb549271ff-proxy-tls podName:b9a9d28c-11ea-4fef-ac00-13eb549271ff nodeName:}" failed. No retries permitted until 2025-12-05 01:12:51.319383122 +0000 UTC m=+146.658775421 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "proxy-tls" (UniqueName: "kubernetes.io/secret/b9a9d28c-11ea-4fef-ac00-13eb549271ff-proxy-tls") pod "machine-config-controller-84d6567774-h4gnf" (UID: "b9a9d28c-11ea-4fef-ac00-13eb549271ff") : failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.821809 4665 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/olm-operator-serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.821849 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774-srv-cert podName:cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774 nodeName:}" failed. No retries permitted until 2025-12-05 01:12:51.321839562 +0000 UTC m=+146.661231861 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "srv-cert" (UniqueName: "kubernetes.io/secret/cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774-srv-cert") pod "olm-operator-6b444d44fb-9bxhg" (UID: "cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774") : failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.823258 4665 secret.go:188] Couldn't get secret openshift-machine-config-operator/machine-config-server-tls: failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.823332 4665 secret.go:188] Couldn't get secret openshift-machine-config-operator/node-bootstrapper-token: failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.823349 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fb9e84d7-85d4-4d27-b238-cb79eebe9cd6-certs podName:fb9e84d7-85d4-4d27-b238-cb79eebe9cd6 nodeName:}" failed. No retries permitted until 2025-12-05 01:12:51.323330638 +0000 UTC m=+146.662723007 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "certs" (UniqueName: "kubernetes.io/secret/fb9e84d7-85d4-4d27-b238-cb79eebe9cd6-certs") pod "machine-config-server-6swmr" (UID: "fb9e84d7-85d4-4d27-b238-cb79eebe9cd6") : failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.823367 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/fb9e84d7-85d4-4d27-b238-cb79eebe9cd6-node-bootstrap-token podName:fb9e84d7-85d4-4d27-b238-cb79eebe9cd6 nodeName:}" failed. 
No retries permitted until 2025-12-05 01:12:51.323358668 +0000 UTC m=+146.662750957 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "node-bootstrap-token" (UniqueName: "kubernetes.io/secret/fb9e84d7-85d4-4d27-b238-cb79eebe9cd6-node-bootstrap-token") pod "machine-config-server-6swmr" (UID: "fb9e84d7-85d4-4d27-b238-cb79eebe9cd6") : failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.823412 4665 configmap.go:193] Couldn't get configMap openshift-marketplace/marketplace-trusted-ca: failed to sync configmap cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.823454 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580-marketplace-trusted-ca podName:9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580 nodeName:}" failed. No retries permitted until 2025-12-05 01:12:51.32344453 +0000 UTC m=+146.662836959 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "marketplace-trusted-ca" (UniqueName: "kubernetes.io/configmap/9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580-marketplace-trusted-ca") pod "marketplace-operator-79b997595-tqfg7" (UID: "9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580") : failed to sync configmap cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.823491 4665 secret.go:188] Couldn't get secret openshift-marketplace/marketplace-operator-metrics: failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.823531 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580-marketplace-operator-metrics podName:9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580 nodeName:}" failed. No retries permitted until 2025-12-05 01:12:51.323521512 +0000 UTC m=+146.662913931 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "marketplace-operator-metrics" (UniqueName: "kubernetes.io/secret/9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580-marketplace-operator-metrics") pod "marketplace-operator-79b997595-tqfg7" (UID: "9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580") : failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.823551 4665 secret.go:188] Couldn't get secret openshift-machine-config-operator/mco-proxy-tls: failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.823581 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/759b38bd-4ae5-40d9-ada0-c27a4d86dde8-proxy-tls podName:759b38bd-4ae5-40d9-ada0-c27a4d86dde8 nodeName:}" failed. No retries permitted until 2025-12-05 01:12:51.323573244 +0000 UTC m=+146.662965653 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "proxy-tls" (UniqueName: "kubernetes.io/secret/759b38bd-4ae5-40d9-ada0-c27a4d86dde8-proxy-tls") pod "machine-config-operator-74547568cd-g5zmc" (UID: "759b38bd-4ae5-40d9-ada0-c27a4d86dde8") : failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.823605 4665 secret.go:188] Couldn't get secret openshift-machine-api/control-plane-machine-set-operator-tls: failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.823702 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/f20fec7f-c7bb-4bb5-b86c-076b8931aa97-control-plane-machine-set-operator-tls podName:f20fec7f-c7bb-4bb5-b86c-076b8931aa97 nodeName:}" failed. No retries permitted until 2025-12-05 01:12:51.323675406 +0000 UTC m=+146.663067705 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "control-plane-machine-set-operator-tls" (UniqueName: "kubernetes.io/secret/f20fec7f-c7bb-4bb5-b86c-076b8931aa97-control-plane-machine-set-operator-tls") pod "control-plane-machine-set-operator-78cbb6b69f-k5gsl" (UID: "f20fec7f-c7bb-4bb5-b86c-076b8931aa97") : failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.824682 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.825377 4665 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/package-server-manager-serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.825432 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/55c71265-8dce-4127-ae57-1f0850d20a80-package-server-manager-serving-cert podName:55c71265-8dce-4127-ae57-1f0850d20a80 nodeName:}" failed. No retries permitted until 2025-12-05 01:12:51.325415669 +0000 UTC m=+146.664807958 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "package-server-manager-serving-cert" (UniqueName: "kubernetes.io/secret/55c71265-8dce-4127-ae57-1f0850d20a80-package-server-manager-serving-cert") pod "package-server-manager-789f6589d5-794gc" (UID: "55c71265-8dce-4127-ae57-1f0850d20a80") : failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.830931 4665 secret.go:188] Couldn't get secret openshift-ingress-canary/canary-serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.830993 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e3424ab6-4a72-48f5-9fd2-72204d4e2a6b-cert podName:e3424ab6-4a72-48f5-9fd2-72204d4e2a6b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:51.330978934 +0000 UTC m=+146.670371313 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/e3424ab6-4a72-48f5-9fd2-72204d4e2a6b-cert") pod "ingress-canary-7lq4t" (UID: "e3424ab6-4a72-48f5-9fd2-72204d4e2a6b") : failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.830931 4665 secret.go:188] Couldn't get secret openshift-operator-lifecycle-manager/packageserver-service-cert: failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.831030 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d82a4e9d-7232-4652-baec-7f0e395afd4b-webhook-cert podName:d82a4e9d-7232-4652-baec-7f0e395afd4b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:51.331023365 +0000 UTC m=+146.670415804 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-cert" (UniqueName: "kubernetes.io/secret/d82a4e9d-7232-4652-baec-7f0e395afd4b-webhook-cert") pod "packageserver-d55dfcdfc-55kmh" (UID: "d82a4e9d-7232-4652-baec-7f0e395afd4b") : failed to sync secret cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.835891 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-fqjcd" event={"ID":"2a2c26ae-4f48-465f-8808-6bb1a443bd89","Type":"ContainerStarted","Data":"10c95c23ca4c96ac76888619ec2bba4a6ab8a91e79f5aacfa820ef9dfdf4c667"} Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.835930 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-fqjcd" event={"ID":"2a2c26ae-4f48-465f-8808-6bb1a443bd89","Type":"ContainerStarted","Data":"2282f003cfe4898cd91aa6b7abb559af6af0b83dcd319c6a6a3b30d110eff5e0"} Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.838790 4665 generic.go:334] "Generic (PLEG): container finished" podID="4d7c33e5-a79c-49f8-9d11-17fbe1879911" containerID="689850f1a951385e8d29b39b1d16e02052cfe49231b82eb753451447fb84b799" exitCode=0 Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.838864 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-vl66w" event={"ID":"4d7c33e5-a79c-49f8-9d11-17fbe1879911","Type":"ContainerDied","Data":"689850f1a951385e8d29b39b1d16e02052cfe49231b82eb753451447fb84b799"} Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.838881 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-vl66w" event={"ID":"4d7c33e5-a79c-49f8-9d11-17fbe1879911","Type":"ContainerStarted","Data":"2d44f9d00a39204b1dd5cfebc816b6a675c4862c0e4e40f196e13faee7ab5f76"} Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.845856 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.851206 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-rgvgv" event={"ID":"30a44c68-0827-46b3-ba7c-5aabf5cb34b1","Type":"ContainerStarted","Data":"6c13eb8d6e730b64c6bac06e5cc54878eb9b1f9764d114ecadf3c94d6ae5ae9d"} Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.854020 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bl9r" 
event={"ID":"948ea842-e99b-4bb1-88df-b58ad8c75e31","Type":"ContainerStarted","Data":"dcd6a531e8c5b5a44f0865e00aca51085bed81ec0a4af77d532a3733046a230e"} Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.857286 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.857448 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:14:52.857407524 +0000 UTC m=+268.196799883 (durationBeforeRetry 2m2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.863672 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.887709 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.907544 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.923806 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.933782 4665 configmap.go:193] Couldn't get configMap openshift-service-ca-operator/service-ca-operator-config: failed to sync configmap cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.934116 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/1ccf81f6-7188-4348-b61e-cb5f347e888a-config podName:1ccf81f6-7188-4348-b61e-cb5f347e888a nodeName:}" failed. No retries permitted until 2025-12-05 01:12:51.43393776 +0000 UTC m=+146.773330059 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/1ccf81f6-7188-4348-b61e-cb5f347e888a-config") pod "service-ca-operator-777779d784-lx8ts" (UID: "1ccf81f6-7188-4348-b61e-cb5f347e888a") : failed to sync configmap cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.934273 4665 configmap.go:193] Couldn't get configMap openshift-operator-lifecycle-manager/collect-profiles-config: failed to sync configmap cache: timed out waiting for the condition Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.934354 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4ebede5a-b1cb-4059-b158-ce9a85aed080-config-volume podName:4ebede5a-b1cb-4059-b158-ce9a85aed080 nodeName:}" failed. 
Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.934399 4665 secret.go:188] Couldn't get secret openshift-service-ca/signing-key: failed to sync secret cache: timed out waiting for the condition
Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.934429 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/945b1001-04d4-4312-ab2c-39bddf4851f2-signing-key podName:945b1001-04d4-4312-ab2c-39bddf4851f2 nodeName:}" failed. No retries permitted until 2025-12-05 01:12:51.434422611 +0000 UTC m=+146.773814910 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "signing-key" (UniqueName: "kubernetes.io/secret/945b1001-04d4-4312-ab2c-39bddf4851f2-signing-key") pod "service-ca-9c57cc56f-wdjtm" (UID: "945b1001-04d4-4312-ab2c-39bddf4851f2") : failed to sync secret cache: timed out waiting for the condition
Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.934452 4665 configmap.go:193] Couldn't get configMap openshift-service-ca/signing-cabundle: failed to sync configmap cache: timed out waiting for the condition
Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.934502 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/945b1001-04d4-4312-ab2c-39bddf4851f2-signing-cabundle podName:945b1001-04d4-4312-ab2c-39bddf4851f2 nodeName:}" failed. No retries permitted until 2025-12-05 01:12:51.434495134 +0000 UTC m=+146.773887433 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "signing-cabundle" (UniqueName: "kubernetes.io/configmap/945b1001-04d4-4312-ab2c-39bddf4851f2-signing-cabundle") pod "service-ca-9c57cc56f-wdjtm" (UID: "945b1001-04d4-4312-ab2c-39bddf4851f2") : failed to sync configmap cache: timed out waiting for the condition
Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.935254 4665 secret.go:188] Couldn't get secret openshift-service-ca-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition
Dec 05 01:12:50 crc kubenswrapper[4665]: E1205 01:12:50.935372 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1ccf81f6-7188-4348-b61e-cb5f347e888a-serving-cert podName:1ccf81f6-7188-4348-b61e-cb5f347e888a nodeName:}" failed. No retries permitted until 2025-12-05 01:12:51.435361435 +0000 UTC m=+146.774753734 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/1ccf81f6-7188-4348-b61e-cb5f347e888a-serving-cert") pod "service-ca-operator-777779d784-lx8ts" (UID: "1ccf81f6-7188-4348-b61e-cb5f347e888a") : failed to sync secret cache: timed out waiting for the condition
Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.944343 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt"
Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.960337 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.961457 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.961482 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.961564 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.962824 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.963510 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.972324 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.972868 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
\"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:12:50 crc kubenswrapper[4665]: I1205 01:12:50.973280 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.011638 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.026721 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.046656 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.062163 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.085003 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.110857 4665 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.117543 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.125538 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.128119 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.133768 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.149395 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.172677 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.187664 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.213821 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.236070 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.243513 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.265123 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.268133 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/6c57b974-85bf-41ea-83aa-096fe4228a87-srv-cert\") pod \"catalog-operator-68c6474976-thm7j\" (UID: \"6c57b974-85bf-41ea-83aa-096fe4228a87\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-thm7j" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.268209 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/6c57b974-85bf-41ea-83aa-096fe4228a87-profile-collector-cert\") pod \"catalog-operator-68c6474976-thm7j\" (UID: \"6c57b974-85bf-41ea-83aa-096fe4228a87\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-thm7j" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.289390 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/6c57b974-85bf-41ea-83aa-096fe4228a87-profile-collector-cert\") pod \"catalog-operator-68c6474976-thm7j\" (UID: \"6c57b974-85bf-41ea-83aa-096fe4228a87\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-thm7j" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.297905 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.308948 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/6c57b974-85bf-41ea-83aa-096fe4228a87-srv-cert\") pod \"catalog-operator-68c6474976-thm7j\" (UID: \"6c57b974-85bf-41ea-83aa-096fe4228a87\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-thm7j" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.316620 4665 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.332665 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.345494 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.370193 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d82a4e9d-7232-4652-baec-7f0e395afd4b-webhook-cert\") pod \"packageserver-d55dfcdfc-55kmh\" (UID: \"d82a4e9d-7232-4652-baec-7f0e395afd4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.370473 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4eb1d49b-1c1f-448c-888a-63d7966b8480-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-sf489\" (UID: \"4eb1d49b-1c1f-448c-888a-63d7966b8480\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sf489" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.370516 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d82a4e9d-7232-4652-baec-7f0e395afd4b-apiservice-cert\") pod \"packageserver-d55dfcdfc-55kmh\" (UID: \"d82a4e9d-7232-4652-baec-7f0e395afd4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.370550 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b9a9d28c-11ea-4fef-ac00-13eb549271ff-proxy-tls\") pod \"machine-config-controller-84d6567774-h4gnf\" (UID: \"b9a9d28c-11ea-4fef-ac00-13eb549271ff\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h4gnf" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.370579 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/759b38bd-4ae5-40d9-ada0-c27a4d86dde8-proxy-tls\") pod \"machine-config-operator-74547568cd-g5zmc\" (UID: \"759b38bd-4ae5-40d9-ada0-c27a4d86dde8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g5zmc" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.370602 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774-srv-cert\") pod \"olm-operator-6b444d44fb-9bxhg\" (UID: \"cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9bxhg" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.370663 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/fb9e84d7-85d4-4d27-b238-cb79eebe9cd6-node-bootstrap-token\") pod \"machine-config-server-6swmr\" (UID: \"fb9e84d7-85d4-4d27-b238-cb79eebe9cd6\") " pod="openshift-machine-config-operator/machine-config-server-6swmr" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.370688 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tqfg7\" (UID: \"9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580\") " pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.370706 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/fb9e84d7-85d4-4d27-b238-cb79eebe9cd6-certs\") pod \"machine-config-server-6swmr\" (UID: \"fb9e84d7-85d4-4d27-b238-cb79eebe9cd6\") " pod="openshift-machine-config-operator/machine-config-server-6swmr" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.370738 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tqfg7\" (UID: \"9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580\") " pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.370755 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/f20fec7f-c7bb-4bb5-b86c-076b8931aa97-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-k5gsl\" (UID: \"f20fec7f-c7bb-4bb5-b86c-076b8931aa97\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-k5gsl" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.370794 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/55c71265-8dce-4127-ae57-1f0850d20a80-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-794gc\" (UID: \"55c71265-8dce-4127-ae57-1f0850d20a80\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-794gc" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.370824 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e3424ab6-4a72-48f5-9fd2-72204d4e2a6b-cert\") pod \"ingress-canary-7lq4t\" (UID: \"e3424ab6-4a72-48f5-9fd2-72204d4e2a6b\") " pod="openshift-ingress-canary/ingress-canary-7lq4t" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.376729 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.378503 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/e3424ab6-4a72-48f5-9fd2-72204d4e2a6b-cert\") pod \"ingress-canary-7lq4t\" (UID: \"e3424ab6-4a72-48f5-9fd2-72204d4e2a6b\") " pod="openshift-ingress-canary/ingress-canary-7lq4t" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.385601 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.388900 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tqfg7\" (UID: \"9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580\") 
" pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.389750 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774-srv-cert\") pod \"olm-operator-6b444d44fb-9bxhg\" (UID: \"cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9bxhg" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.396898 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tqfg7\" (UID: \"9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580\") " pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.397381 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/f20fec7f-c7bb-4bb5-b86c-076b8931aa97-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-k5gsl\" (UID: \"f20fec7f-c7bb-4bb5-b86c-076b8931aa97\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-k5gsl" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.398888 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d82a4e9d-7232-4652-baec-7f0e395afd4b-apiservice-cert\") pod \"packageserver-d55dfcdfc-55kmh\" (UID: \"d82a4e9d-7232-4652-baec-7f0e395afd4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.400783 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/fb9e84d7-85d4-4d27-b238-cb79eebe9cd6-certs\") pod \"machine-config-server-6swmr\" (UID: \"fb9e84d7-85d4-4d27-b238-cb79eebe9cd6\") " pod="openshift-machine-config-operator/machine-config-server-6swmr" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.403178 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/55c71265-8dce-4127-ae57-1f0850d20a80-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-794gc\" (UID: \"55c71265-8dce-4127-ae57-1f0850d20a80\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-794gc" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.412856 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.428762 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/b9a9d28c-11ea-4fef-ac00-13eb549271ff-proxy-tls\") pod \"machine-config-controller-84d6567774-h4gnf\" (UID: \"b9a9d28c-11ea-4fef-ac00-13eb549271ff\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h4gnf" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.428995 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.431278 4665 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/4eb1d49b-1c1f-448c-888a-63d7966b8480-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-sf489\" (UID: \"4eb1d49b-1c1f-448c-888a-63d7966b8480\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sf489" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.432053 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/759b38bd-4ae5-40d9-ada0-c27a4d86dde8-proxy-tls\") pod \"machine-config-operator-74547568cd-g5zmc\" (UID: \"759b38bd-4ae5-40d9-ada0-c27a4d86dde8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g5zmc" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.432632 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d82a4e9d-7232-4652-baec-7f0e395afd4b-webhook-cert\") pod \"packageserver-d55dfcdfc-55kmh\" (UID: \"d82a4e9d-7232-4652-baec-7f0e395afd4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.433129 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/fb9e84d7-85d4-4d27-b238-cb79eebe9cd6-node-bootstrap-token\") pod \"machine-config-server-6swmr\" (UID: \"fb9e84d7-85d4-4d27-b238-cb79eebe9cd6\") " pod="openshift-machine-config-operator/machine-config-server-6swmr" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.457442 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.465616 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.471638 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/945b1001-04d4-4312-ab2c-39bddf4851f2-signing-cabundle\") pod \"service-ca-9c57cc56f-wdjtm\" (UID: \"945b1001-04d4-4312-ab2c-39bddf4851f2\") " pod="openshift-service-ca/service-ca-9c57cc56f-wdjtm" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.471717 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ccf81f6-7188-4348-b61e-cb5f347e888a-config\") pod \"service-ca-operator-777779d784-lx8ts\" (UID: \"1ccf81f6-7188-4348-b61e-cb5f347e888a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lx8ts" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.471744 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ccf81f6-7188-4348-b61e-cb5f347e888a-serving-cert\") pod \"service-ca-operator-777779d784-lx8ts\" (UID: \"1ccf81f6-7188-4348-b61e-cb5f347e888a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lx8ts" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.471805 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4ebede5a-b1cb-4059-b158-ce9a85aed080-config-volume\") pod \"collect-profiles-29414940-rn5fx\" (UID: \"4ebede5a-b1cb-4059-b158-ce9a85aed080\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29414940-rn5fx" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.471822 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/945b1001-04d4-4312-ab2c-39bddf4851f2-signing-key\") pod \"service-ca-9c57cc56f-wdjtm\" (UID: \"945b1001-04d4-4312-ab2c-39bddf4851f2\") " pod="openshift-service-ca/service-ca-9c57cc56f-wdjtm" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.472536 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1ccf81f6-7188-4348-b61e-cb5f347e888a-config\") pod \"service-ca-operator-777779d784-lx8ts\" (UID: \"1ccf81f6-7188-4348-b61e-cb5f347e888a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lx8ts" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.473129 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4ebede5a-b1cb-4059-b158-ce9a85aed080-config-volume\") pod \"collect-profiles-29414940-rn5fx\" (UID: \"4ebede5a-b1cb-4059-b158-ce9a85aed080\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414940-rn5fx" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.484141 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.504906 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.528104 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.544109 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.544787 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1ccf81f6-7188-4348-b61e-cb5f347e888a-serving-cert\") pod \"service-ca-operator-777779d784-lx8ts\" (UID: \"1ccf81f6-7188-4348-b61e-cb5f347e888a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lx8ts" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.567435 4665 request.go:700] Waited for 1.877460399s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-service-ca/secrets?fieldSelector=metadata.name%3Dservice-ca-dockercfg-pn86c&limit=500&resourceVersion=0 Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.570883 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.592432 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.596655 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/945b1001-04d4-4312-ab2c-39bddf4851f2-signing-key\") pod \"service-ca-9c57cc56f-wdjtm\" (UID: \"945b1001-04d4-4312-ab2c-39bddf4851f2\") " pod="openshift-service-ca/service-ca-9c57cc56f-wdjtm" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 
Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.603737 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/945b1001-04d4-4312-ab2c-39bddf4851f2-signing-cabundle\") pod \"service-ca-9c57cc56f-wdjtm\" (UID: \"945b1001-04d4-4312-ab2c-39bddf4851f2\") " pod="openshift-service-ca/service-ca-9c57cc56f-wdjtm"
Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.650189 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt"
Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.681857 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znkf5\" (UniqueName: \"kubernetes.io/projected/6afac4e7-1280-46f1-9571-a8aaad37b32f-kube-api-access-znkf5\") pod \"kube-storage-version-migrator-operator-b67b599dd-7qh5v\" (UID: \"6afac4e7-1280-46f1-9571-a8aaad37b32f\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7qh5v"
Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.707144 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/90d5f8b3-804d-4f32-81b9-230301d5a834-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-zq8h8\" (UID: \"90d5f8b3-804d-4f32-81b9-230301d5a834\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zq8h8"
Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.715245 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2595c30a-ace8-4674-8691-4353a9e55c46-bound-sa-token\") pod \"ingress-operator-5b745b69d9-xpmr7\" (UID: \"2595c30a-ace8-4674-8691-4353a9e55c46\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xpmr7"
Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.729670 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zq8h8"
Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.732721 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7bbd42a4-d665-4950-89b1-ff6c53ac0b60-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-x8jz5\" (UID: \"7bbd42a4-d665-4950-89b1-ff6c53ac0b60\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x8jz5"
Dec 05 01:12:51 crc kubenswrapper[4665]: E1205 01:12:51.756351 4665 projected.go:288] Couldn't get configMap openshift-console-operator/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition
Dec 05 01:12:51 crc kubenswrapper[4665]: E1205 01:12:51.756406 4665 projected.go:194] Error preparing data for projected volume kube-api-access-twd8d for pod openshift-console-operator/console-operator-58897d9998-4m9lk: failed to sync configmap cache: timed out waiting for the condition
Dec 05 01:12:51 crc kubenswrapper[4665]: E1205 01:12:51.756465 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9c541bcd-d60a-4e73-8bcd-2d502eebbb9a-kube-api-access-twd8d podName:9c541bcd-d60a-4e73-8bcd-2d502eebbb9a nodeName:}" failed. No retries permitted until 2025-12-05 01:12:52.756447237 +0000 UTC m=+148.095839526 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-twd8d" (UniqueName: "kubernetes.io/projected/9c541bcd-d60a-4e73-8bcd-2d502eebbb9a-kube-api-access-twd8d") pod "console-operator-58897d9998-4m9lk" (UID: "9c541bcd-d60a-4e73-8bcd-2d502eebbb9a") : failed to sync configmap cache: timed out waiting for the condition
Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.780860 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2478a25b-a8b7-44b5-8204-5862f15fb53d-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-bdr9q\" (UID: \"2478a25b-a8b7-44b5-8204-5862f15fb53d\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bdr9q"
Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.781242 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxthd\" (UniqueName: \"kubernetes.io/projected/6ae00a40-d6ad-4d9d-9eca-8c09a9818801-kube-api-access-zxthd\") pod \"dns-default-qgtfk\" (UID: \"6ae00a40-d6ad-4d9d-9eca-8c09a9818801\") " pod="openshift-dns/dns-default-qgtfk"
Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.793532 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbn7g\" (UniqueName: \"kubernetes.io/projected/2595c30a-ace8-4674-8691-4353a9e55c46-kube-api-access-kbn7g\") pod \"ingress-operator-5b745b69d9-xpmr7\" (UID: \"2595c30a-ace8-4674-8691-4353a9e55c46\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xpmr7"
Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.794142 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7qh5v"
Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.849541 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8l8b\" (UniqueName: \"kubernetes.io/projected/10397df3-a399-4278-8909-538e0b8c3c01-kube-api-access-z8l8b\") pod \"openshift-apiserver-operator-796bbdcf4f-4wbqn\" (UID: \"10397df3-a399-4278-8909-538e0b8c3c01\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4wbqn"
Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.856141 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x8jz5"
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x8jz5" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.856919 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dznf7\" (UniqueName: \"kubernetes.io/projected/7d763d3d-4925-4c67-8828-873c9a8dc973-kube-api-access-dznf7\") pod \"migrator-59844c95c7-snslb\" (UID: \"7d763d3d-4925-4c67-8828-873c9a8dc973\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-snslb" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.858449 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c9sx8\" (UniqueName: \"kubernetes.io/projected/191296ac-80a8-4b64-acf8-d0087ac08c79-kube-api-access-c9sx8\") pod \"router-default-5444994796-7wl87\" (UID: \"191296ac-80a8-4b64-acf8-d0087ac08c79\") " pod="openshift-ingress/router-default-5444994796-7wl87" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.872661 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fnpfl\" (UniqueName: \"kubernetes.io/projected/6c57b974-85bf-41ea-83aa-096fe4228a87-kube-api-access-fnpfl\") pod \"catalog-operator-68c6474976-thm7j\" (UID: \"6c57b974-85bf-41ea-83aa-096fe4228a87\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-thm7j" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.890372 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-45lxk" event={"ID":"5e25d774-afc2-4e37-9121-79f761e9e8d9","Type":"ContainerStarted","Data":"466084b0820bde5ab17b676a39f92d3585a36946854df1124a17ecc389fead7f"} Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.890568 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-7wl87" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.896510 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-thm7j" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.915440 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bm7q\" (UniqueName: \"kubernetes.io/projected/9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580-kube-api-access-4bm7q\") pod \"marketplace-operator-79b997595-tqfg7\" (UID: \"9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580\") " pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.955089 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f45pc\" (UniqueName: \"kubernetes.io/projected/d82a4e9d-7232-4652-baec-7f0e395afd4b-kube-api-access-f45pc\") pod \"packageserver-d55dfcdfc-55kmh\" (UID: \"d82a4e9d-7232-4652-baec-7f0e395afd4b\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.957639 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-vl66w" event={"ID":"4d7c33e5-a79c-49f8-9d11-17fbe1879911","Type":"ContainerStarted","Data":"945fe227b4ebbf5a1b62b62a61eeb30b59e407452ad231af83447dec5041c29f"} Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.958359 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h74x2\" (UniqueName: \"kubernetes.io/projected/e3424ab6-4a72-48f5-9fd2-72204d4e2a6b-kube-api-access-h74x2\") pod \"ingress-canary-7lq4t\" (UID: \"e3424ab6-4a72-48f5-9fd2-72204d4e2a6b\") " pod="openshift-ingress-canary/ingress-canary-7lq4t" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.958865 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-qgtfk" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.967553 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.990586 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-hkzlk" event={"ID":"d808dde0-4fdc-4d21-a6f2-3c27f540018f","Type":"ContainerStarted","Data":"614ed74ecf685a706a007e7238f88ae29e99eecf3b10eb9cf4fb5739707ce57f"} Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.991284 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-hkzlk" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.992823 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvt6l\" (UniqueName: \"kubernetes.io/projected/b9a9d28c-11ea-4fef-ac00-13eb549271ff-kube-api-access-rvt6l\") pod \"machine-config-controller-84d6567774-h4gnf\" (UID: \"b9a9d28c-11ea-4fef-ac00-13eb549271ff\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h4gnf" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.993016 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bdr9q" Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.996721 4665 patch_prober.go:28] interesting pod/downloads-7954f5f757-hkzlk container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Dec 05 01:12:51 crc kubenswrapper[4665]: I1205 01:12:51.996769 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-hkzlk" podUID="d808dde0-4fdc-4d21-a6f2-3c27f540018f" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.009747 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xpmr7" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.012440 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-7lq4t" Dec 05 01:12:52 crc kubenswrapper[4665]: W1205 01:12:52.032427 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod191296ac_80a8_4b64_acf8_d0087ac08c79.slice/crio-c7224d4c60c07b40c918fa836db83be775fde1b4825a7e9e1c26ff35ef72c1a4 WatchSource:0}: Error finding container c7224d4c60c07b40c918fa836db83be775fde1b4825a7e9e1c26ff35ef72c1a4: Status 404 returned error can't find the container with id c7224d4c60c07b40c918fa836db83be775fde1b4825a7e9e1c26ff35ef72c1a4 Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.032726 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-rgvgv" event={"ID":"30a44c68-0827-46b3-ba7c-5aabf5cb34b1","Type":"ContainerStarted","Data":"a1fc237378f3a353d7c352b51ba481ec6721b393309913e6c49690f9128484cb"} Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.032762 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-rgvgv" event={"ID":"30a44c68-0827-46b3-ba7c-5aabf5cb34b1","Type":"ContainerStarted","Data":"330fa5e64fe4eb64c8ad800c8c15ea485dc72116b703f2aef6bdfb83f44f63fd"} Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.039287 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q9qh9\" (UniqueName: \"kubernetes.io/projected/759b38bd-4ae5-40d9-ada0-c27a4d86dde8-kube-api-access-q9qh9\") pod \"machine-config-operator-74547568cd-g5zmc\" (UID: \"759b38bd-4ae5-40d9-ada0-c27a4d86dde8\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g5zmc" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.053595 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbckp\" (UniqueName: \"kubernetes.io/projected/cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774-kube-api-access-rbckp\") pod \"olm-operator-6b444d44fb-9bxhg\" (UID: \"cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9bxhg" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.059355 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftxx8\" (UniqueName: \"kubernetes.io/projected/4eb1d49b-1c1f-448c-888a-63d7966b8480-kube-api-access-ftxx8\") pod 
\"multus-admission-controller-857f4d67dd-sf489\" (UID: \"4eb1d49b-1c1f-448c-888a-63d7966b8480\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-sf489" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.059572 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4wbqn" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.080502 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-snslb" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.088857 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jq98p\" (UniqueName: \"kubernetes.io/projected/29ef4f10-a6b6-4551-8067-0a82efc5651d-kube-api-access-jq98p\") pod \"oauth-openshift-558db77b4-6bhrf\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.091617 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h4gnf" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.103996 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bl9r" event={"ID":"948ea842-e99b-4bb1-88df-b58ad8c75e31","Type":"ContainerStarted","Data":"cd6582a4a890e6722128dc72278e2b4309577dc7955aedba958e67d58fe3b4a3"} Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.116386 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkjbl\" (UniqueName: \"kubernetes.io/projected/55c71265-8dce-4127-ae57-1f0850d20a80-kube-api-access-mkjbl\") pod \"package-server-manager-789f6589d5-794gc\" (UID: \"55c71265-8dce-4127-ae57-1f0850d20a80\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-794gc" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.117047 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hkh4p\" (UniqueName: \"kubernetes.io/projected/fb9e84d7-85d4-4d27-b238-cb79eebe9cd6-kube-api-access-hkh4p\") pod \"machine-config-server-6swmr\" (UID: \"fb9e84d7-85d4-4d27-b238-cb79eebe9cd6\") " pod="openshift-machine-config-operator/machine-config-server-6swmr" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.125805 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-chsk5\" (UniqueName: \"kubernetes.io/projected/f8f58b77-587d-4631-acf8-eac2c8a3ce4f-kube-api-access-chsk5\") pod \"csi-hostpathplugin-5jczg\" (UID: \"f8f58b77-587d-4631-acf8-eac2c8a3ce4f\") " pod="hostpath-provisioner/csi-hostpathplugin-5jczg" Dec 05 01:12:52 crc kubenswrapper[4665]: W1205 01:12:52.131041 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-2dc87966b36e10c0a25126c4ecfd4fc0f7a4cf828f82334cf37d903ba32268de WatchSource:0}: Error finding container 2dc87966b36e10c0a25126c4ecfd4fc0f7a4cf828f82334cf37d903ba32268de: Status 404 returned error can't find the container with id 2dc87966b36e10c0a25126c4ecfd4fc0f7a4cf828f82334cf37d903ba32268de Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.131617 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86r58" event={"ID":"648c3d09-edc5-43d1-acad-e3a36aa4c0d4","Type":"ContainerStarted","Data":"c1e9e08ef46e2f41f4c13361852f65751396b3a3b743eb21b13c0d9c6bfb5700"} Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.156657 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-794gc" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.157250 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b2csh\" (UniqueName: \"kubernetes.io/projected/1ccf81f6-7188-4348-b61e-cb5f347e888a-kube-api-access-b2csh\") pod \"service-ca-operator-777779d784-lx8ts\" (UID: \"1ccf81f6-7188-4348-b61e-cb5f347e888a\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-lx8ts" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.157927 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2sr6g\" (UniqueName: \"kubernetes.io/projected/f20fec7f-c7bb-4bb5-b86c-076b8931aa97-kube-api-access-2sr6g\") pod \"control-plane-machine-set-operator-78cbb6b69f-k5gsl\" (UID: \"f20fec7f-c7bb-4bb5-b86c-076b8931aa97\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-k5gsl" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.166591 4665 generic.go:334] "Generic (PLEG): container finished" podID="1d59a021-618b-40a7-b2b6-4013c084d74e" containerID="da38ac7e6b979187902c7ba1fcbf906efe12b22d8403a54919ca75224416dd6f" exitCode=0 Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.166652 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-sjmw5" event={"ID":"1d59a021-618b-40a7-b2b6-4013c084d74e","Type":"ContainerDied","Data":"da38ac7e6b979187902c7ba1fcbf906efe12b22d8403a54919ca75224416dd6f"} Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.166675 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-sjmw5" event={"ID":"1d59a021-618b-40a7-b2b6-4013c084d74e","Type":"ContainerStarted","Data":"e113559e2ffc57aaf4c37ba02be3d588c99a9adb14db0fb709b522acb725a66b"} Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.167213 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-sjmw5" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.167509 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.186359 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pmfl\" (UniqueName: \"kubernetes.io/projected/4ebede5a-b1cb-4059-b158-ce9a85aed080-kube-api-access-4pmfl\") pod \"collect-profiles-29414940-rn5fx\" (UID: \"4ebede5a-b1cb-4059-b158-ce9a85aed080\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414940-rn5fx" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.186578 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.186838 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.187693 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fdf6b\" (UniqueName: \"kubernetes.io/projected/945b1001-04d4-4312-ab2c-39bddf4851f2-kube-api-access-fdf6b\") pod \"service-ca-9c57cc56f-wdjtm\" (UID: \"945b1001-04d4-4312-ab2c-39bddf4851f2\") " pod="openshift-service-ca/service-ca-9c57cc56f-wdjtm" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.206859 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414940-rn5fx" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.247766 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g5zmc" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.248092 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv" event={"ID":"34ce612a-9777-4777-b6af-e98a53b3fb57","Type":"ContainerStarted","Data":"01d30b84d42bea6eef7be7e78b35d9d7d8c78ec73300e2faee7a36e3addb5ab7"} Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.248367 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.254539 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-sf489" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.277611 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-42qzk" event={"ID":"cfa776fb-8b8d-4cd1-941a-57e2672afdeb","Type":"ContainerStarted","Data":"2a26663b18ab8f224c754fb7c6c2b38089b6813ded6fc8b45c027ba4241683f3"} Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.277686 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-42qzk" event={"ID":"cfa776fb-8b8d-4cd1-941a-57e2672afdeb","Type":"ContainerStarted","Data":"dba85fdc7d84e56f43b195425cad52ee351ff913447abb6cf70c00ed948a1f41"} Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.283934 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbvlz\" (UniqueName: \"kubernetes.io/projected/bef0c5a0-6fef-4199-8782-99bc78b35374-kube-api-access-jbvlz\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.283980 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.284100 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bef0c5a0-6fef-4199-8782-99bc78b35374-trusted-ca\") pod 
\"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.284132 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bef0c5a0-6fef-4199-8782-99bc78b35374-registry-tls\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.284187 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bef0c5a0-6fef-4199-8782-99bc78b35374-bound-sa-token\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.284223 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bef0c5a0-6fef-4199-8782-99bc78b35374-registry-certificates\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: E1205 01:12:52.285473 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:52.785451315 +0000 UTC m=+148.124843614 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.287396 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9bxhg" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.321061 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bef0c5a0-6fef-4199-8782-99bc78b35374-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.321732 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bef0c5a0-6fef-4199-8782-99bc78b35374-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.362394 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-q7hzk" event={"ID":"044a062b-0f88-42e6-9d3a-7a74dfa51ca5","Type":"ContainerStarted","Data":"0032693617c02ef802d42616dda3d74a392130c8a2ce3a40aed7f5788f0f41f9"} Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.382655 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-5jczg" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.389571 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-lx8ts" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.391056 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-wdjtm" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.403872 4665 generic.go:334] "Generic (PLEG): container finished" podID="1cbef097-04e8-4837-9fbb-f37d6b9dafb2" containerID="696a14b78396451f95eebaba3e411192148180632554950849b7f2f9a27c0b32" exitCode=0 Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.403993 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm" event={"ID":"1cbef097-04e8-4837-9fbb-f37d6b9dafb2","Type":"ContainerDied","Data":"696a14b78396451f95eebaba3e411192148180632554950849b7f2f9a27c0b32"} Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.404026 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm" event={"ID":"1cbef097-04e8-4837-9fbb-f37d6b9dafb2","Type":"ContainerStarted","Data":"59f0c396a64ce4dde565c1c52e02b9af3eed392b39bb0c7085af834539b9c9e3"} Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.415612 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-6swmr" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.425132 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:52 crc kubenswrapper[4665]: E1205 01:12:52.425380 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:52.925361598 +0000 UTC m=+148.264753897 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.425420 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bef0c5a0-6fef-4199-8782-99bc78b35374-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.425529 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbvlz\" (UniqueName: \"kubernetes.io/projected/bef0c5a0-6fef-4199-8782-99bc78b35374-kube-api-access-jbvlz\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.425585 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.425674 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bef0c5a0-6fef-4199-8782-99bc78b35374-trusted-ca\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.425707 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bef0c5a0-6fef-4199-8782-99bc78b35374-registry-tls\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.425817 4665 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bef0c5a0-6fef-4199-8782-99bc78b35374-bound-sa-token\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.425842 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bef0c5a0-6fef-4199-8782-99bc78b35374-registry-certificates\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.426023 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bef0c5a0-6fef-4199-8782-99bc78b35374-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.427463 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bef0c5a0-6fef-4199-8782-99bc78b35374-trusted-ca\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.429515 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bef0c5a0-6fef-4199-8782-99bc78b35374-registry-certificates\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: E1205 01:12:52.434422 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:52.934410928 +0000 UTC m=+148.273803227 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.438997 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-k5gsl" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.440358 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bef0c5a0-6fef-4199-8782-99bc78b35374-ca-trust-extracted\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.463211 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bef0c5a0-6fef-4199-8782-99bc78b35374-installation-pull-secrets\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.468796 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bef0c5a0-6fef-4199-8782-99bc78b35374-registry-tls\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.471343 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-njkt2" event={"ID":"fc878503-97d1-4f69-a607-1ccae9ca303a","Type":"ContainerStarted","Data":"bdee7553f86cb2a4c038c84b3deba6ceb6e158984a38c1863ce540a86082967c"} Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.471389 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-njkt2" event={"ID":"fc878503-97d1-4f69-a607-1ccae9ca303a","Type":"ContainerStarted","Data":"529d92a40d820e1fb8d5e51971ec7d62b293b4f08e0899dc1d9812c0ae7f1681"} Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.475677 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bef0c5a0-6fef-4199-8782-99bc78b35374-bound-sa-token\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.478110 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbvlz\" (UniqueName: \"kubernetes.io/projected/bef0c5a0-6fef-4199-8782-99bc78b35374-kube-api-access-jbvlz\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.480040 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.526780 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:52 crc kubenswrapper[4665]: E1205 01:12:52.527842 4665 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:53.027826413 +0000 UTC m=+148.367218712 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.540376 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf" podStartSLOduration=129.540361396 podStartE2EDuration="2m9.540361396s" podCreationTimestamp="2025-12-05 01:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:52.539859714 +0000 UTC m=+147.879252013" watchObservedRunningTime="2025-12-05 01:12:52.540361396 +0000 UTC m=+147.879753695" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.586957 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.628676 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: E1205 01:12:52.629708 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:53.129696863 +0000 UTC m=+148.469089162 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.641054 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-7bl9r" podStartSLOduration=129.641039888 podStartE2EDuration="2m9.641039888s" podCreationTimestamp="2025-12-05 01:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:52.600386052 +0000 UTC m=+147.939778351" watchObservedRunningTime="2025-12-05 01:12:52.641039888 +0000 UTC m=+147.980432177" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.729691 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:52 crc kubenswrapper[4665]: E1205 01:12:52.729850 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:53.229825431 +0000 UTC m=+148.569217730 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.730168 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: E1205 01:12:52.730489 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:53.230481177 +0000 UTC m=+148.569873476 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.741930 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-xv889" podStartSLOduration=129.741915474 podStartE2EDuration="2m9.741915474s" podCreationTimestamp="2025-12-05 01:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:52.741214277 +0000 UTC m=+148.080606566" watchObservedRunningTime="2025-12-05 01:12:52.741915474 +0000 UTC m=+148.081307773" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.782020 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7qh5v"] Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.830851 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.831066 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twd8d\" (UniqueName: \"kubernetes.io/projected/9c541bcd-d60a-4e73-8bcd-2d502eebbb9a-kube-api-access-twd8d\") pod \"console-operator-58897d9998-4m9lk\" (UID: \"9c541bcd-d60a-4e73-8bcd-2d502eebbb9a\") " pod="openshift-console-operator/console-operator-58897d9998-4m9lk" Dec 05 01:12:52 crc kubenswrapper[4665]: E1205 01:12:52.831196 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:53.331171419 +0000 UTC m=+148.670563718 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.934539 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:52 crc kubenswrapper[4665]: E1205 01:12:52.935096 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:53.435083888 +0000 UTC m=+148.774476187 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.953417 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twd8d\" (UniqueName: \"kubernetes.io/projected/9c541bcd-d60a-4e73-8bcd-2d502eebbb9a-kube-api-access-twd8d\") pod \"console-operator-58897d9998-4m9lk\" (UID: \"9c541bcd-d60a-4e73-8bcd-2d502eebbb9a\") " pod="openshift-console-operator/console-operator-58897d9998-4m9lk" Dec 05 01:12:52 crc kubenswrapper[4665]: I1205 01:12:52.991655 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-4m9lk" Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.016025 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zq8h8"] Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.035370 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:53 crc kubenswrapper[4665]: E1205 01:12:53.035764 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:53.53574846 +0000 UTC m=+148.875140759 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:53 crc kubenswrapper[4665]: W1205 01:12:53.080057 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod90d5f8b3_804d_4f32_81b9_230301d5a834.slice/crio-64885f761ebcadef3e672d4caf1ff4a415dcf98be835f1222ba900cacc4186da WatchSource:0}: Error finding container 64885f761ebcadef3e672d4caf1ff4a415dcf98be835f1222ba900cacc4186da: Status 404 returned error can't find the container with id 64885f761ebcadef3e672d4caf1ff4a415dcf98be835f1222ba900cacc4186da Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.136978 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:53 crc kubenswrapper[4665]: E1205 01:12:53.137376 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:53.637364344 +0000 UTC m=+148.976756643 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.238021 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:53 crc kubenswrapper[4665]: E1205 01:12:53.238914 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:53.738893796 +0000 UTC m=+149.078286095 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.343962 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:53 crc kubenswrapper[4665]: E1205 01:12:53.344273 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:53.844262072 +0000 UTC m=+149.183654371 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.448969 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:53 crc kubenswrapper[4665]: E1205 01:12:53.449349 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:53.94933379 +0000 UTC m=+149.288726089 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.467643 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-qgtfk"] Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.515700 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"99a336f8d3fda4454d842dbab820599d8fa11d88fa83c3c94865844b034c834b"} Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.515735 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"2dc87966b36e10c0a25126c4ecfd4fc0f7a4cf828f82334cf37d903ba32268de"} Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.552705 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:53 crc kubenswrapper[4665]: E1205 01:12:53.553238 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:54.053224039 +0000 UTC m=+149.392616338 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.559544 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zq8h8" event={"ID":"90d5f8b3-804d-4f32-81b9-230301d5a834","Type":"ContainerStarted","Data":"64885f761ebcadef3e672d4caf1ff4a415dcf98be835f1222ba900cacc4186da"} Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.573694 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7qh5v" event={"ID":"6afac4e7-1280-46f1-9571-a8aaad37b32f","Type":"ContainerStarted","Data":"b0763ad474d2dc3fb8b39dcfbdbdee94f10d7abe2cc5bfbcf508916ccce39bca"} Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.627569 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-vl66w" event={"ID":"4d7c33e5-a79c-49f8-9d11-17fbe1879911","Type":"ContainerStarted","Data":"70ff6b73fffdfe6f656f079f29de9f32613bc529672217cb11d940d66303fa30"} Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.630289 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"2777bb23b62bd223bc02059881070cdf436e855f09254dc92db307974526a7a6"} Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.630335 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"5e604ac898ce037b1c770f9014e32fc46b0662e9896accb2529be90f763a6c2e"} Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.630858 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.657755 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:53 crc kubenswrapper[4665]: E1205 01:12:53.658933 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:54.158909622 +0000 UTC m=+149.498301961 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.678559 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-6swmr" event={"ID":"fb9e84d7-85d4-4d27-b238-cb79eebe9cd6","Type":"ContainerStarted","Data":"92b74cf03543c3ec4710cc38bc1f01f80dcb353bdba5b63157f323bc77282fac"} Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.711084 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-7wl87" event={"ID":"191296ac-80a8-4b64-acf8-d0087ac08c79","Type":"ContainerStarted","Data":"0c84ad4908427c42f51d4663eaaccafe4c32ff3075a24c923b28585441af14f7"} Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.711128 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-7wl87" event={"ID":"191296ac-80a8-4b64-acf8-d0087ac08c79","Type":"ContainerStarted","Data":"c7224d4c60c07b40c918fa836db83be775fde1b4825a7e9e1c26ff35ef72c1a4"} Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.730375 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"695a7bf389669285f2f7a16fdcfd1062c0238d03cc13b8bc4abb835e5d7982f4"} Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.737846 4665 patch_prober.go:28] interesting pod/downloads-7954f5f757-hkzlk container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.737900 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-hkzlk" podUID="d808dde0-4fdc-4d21-a6f2-3c27f540018f" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.760845 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:53 crc kubenswrapper[4665]: E1205 01:12:53.762582 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:54.262570005 +0000 UTC m=+149.601962304 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.787989 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bdr9q"] Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.863811 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:53 crc kubenswrapper[4665]: E1205 01:12:53.863963 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:54.363939614 +0000 UTC m=+149.703331913 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.864036 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:53 crc kubenswrapper[4665]: E1205 01:12:53.864345 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:54.364331223 +0000 UTC m=+149.703723522 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.897378 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-7wl87" Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.908154 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-thm7j"] Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.927554 4665 patch_prober.go:28] interesting pod/router-default-5444994796-7wl87 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 01:12:53 crc kubenswrapper[4665]: [-]has-synced failed: reason withheld Dec 05 01:12:53 crc kubenswrapper[4665]: [+]process-running ok Dec 05 01:12:53 crc kubenswrapper[4665]: healthz check failed Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.927615 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-7wl87" podUID="191296ac-80a8-4b64-acf8-d0087ac08c79" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.960385 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x8jz5"] Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.966837 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:53 crc kubenswrapper[4665]: E1205 01:12:53.967235 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:54.467221669 +0000 UTC m=+149.806613968 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:53 crc kubenswrapper[4665]: I1205 01:12:53.968435 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-fqjcd" podStartSLOduration=130.968426107 podStartE2EDuration="2m10.968426107s" podCreationTimestamp="2025-12-05 01:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:53.966251085 +0000 UTC m=+149.305643394" watchObservedRunningTime="2025-12-05 01:12:53.968426107 +0000 UTC m=+149.307818406" Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.073688 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:54 crc kubenswrapper[4665]: E1205 01:12:54.074156 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:54.574133611 +0000 UTC m=+149.913525910 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.178798 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:54 crc kubenswrapper[4665]: E1205 01:12:54.179070 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:54.679054086 +0000 UTC m=+150.018446385 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.281334 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:54 crc kubenswrapper[4665]: E1205 01:12:54.281987 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:54.781973241 +0000 UTC m=+150.121365540 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.382784 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:54 crc kubenswrapper[4665]: E1205 01:12:54.383137 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:54.883121205 +0000 UTC m=+150.222513504 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.429440 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-vl66w" Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.429762 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-vl66w" Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.485258 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:54 crc kubenswrapper[4665]: E1205 01:12:54.485718 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:54.985706282 +0000 UTC m=+150.325098571 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.517117 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tqfg7"] Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.576254 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-vl66w" podStartSLOduration=130.576239488 podStartE2EDuration="2m10.576239488s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:54.575049949 +0000 UTC m=+149.914442248" watchObservedRunningTime="2025-12-05 01:12:54.576239488 +0000 UTC m=+149.915631787" Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.586247 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:54 crc kubenswrapper[4665]: E1205 01:12:54.586582 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-05 01:12:55.086567758 +0000 UTC m=+150.425960057 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.602937 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv" podStartSLOduration=130.602920585 podStartE2EDuration="2m10.602920585s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:54.60275762 +0000 UTC m=+149.942149919" watchObservedRunningTime="2025-12-05 01:12:54.602920585 +0000 UTC m=+149.942312894" Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.673913 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-45lxk" podStartSLOduration=131.673888975 podStartE2EDuration="2m11.673888975s" podCreationTimestamp="2025-12-05 01:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:54.673034615 +0000 UTC m=+150.012426914" watchObservedRunningTime="2025-12-05 01:12:54.673888975 +0000 UTC m=+150.013281274" Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.691086 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:54 crc kubenswrapper[4665]: E1205 01:12:54.691454 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:55.191441712 +0000 UTC m=+150.530834011 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.702802 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-7lq4t"] Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.776983 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-thm7j" event={"ID":"6c57b974-85bf-41ea-83aa-096fe4228a87","Type":"ContainerStarted","Data":"fc848ad5dcc29cd5f88ad63c604a7e82ec85a699aa5197886762a23c3af1d92a"} Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.777021 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-thm7j" event={"ID":"6c57b974-85bf-41ea-83aa-096fe4228a87","Type":"ContainerStarted","Data":"961d4f28b095ff7ccb73d5cecd9d1e02602af789f9f18ad94408d02a1d30a5ea"} Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.777824 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-thm7j" Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.791938 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:54 crc kubenswrapper[4665]: E1205 01:12:54.792327 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:55.292308927 +0000 UTC m=+150.631701226 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.803726 4665 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-thm7j container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" start-of-body= Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.803765 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-thm7j" podUID="6c57b974-85bf-41ea-83aa-096fe4228a87" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.803847 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-7lq4t" event={"ID":"e3424ab6-4a72-48f5-9fd2-72204d4e2a6b","Type":"ContainerStarted","Data":"d9a8f883772fde6da93b6f94cc2b32997817d4900f26608ac3978ac273d9616a"} Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.831980 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-86r58" podStartSLOduration=131.831945808 podStartE2EDuration="2m11.831945808s" podCreationTimestamp="2025-12-05 01:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:54.823523914 +0000 UTC m=+150.162916213" watchObservedRunningTime="2025-12-05 01:12:54.831945808 +0000 UTC m=+150.171338107" Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.844488 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x8jz5" event={"ID":"7bbd42a4-d665-4950-89b1-ff6c53ac0b60","Type":"ContainerStarted","Data":"ede1ef53b5fe5449676b12b22fb83dfbd38ece86573c85f71f03c429ec54ab75"} Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.858357 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bdr9q" event={"ID":"2478a25b-a8b7-44b5-8204-5862f15fb53d","Type":"ContainerStarted","Data":"fd65c4021f1dee2330b0a083c217301a19eb217eeb0c4552c88cbf11a64056fb"} Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.872698 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-njkt2" podStartSLOduration=131.872683616 podStartE2EDuration="2m11.872683616s" podCreationTimestamp="2025-12-05 01:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:54.870827202 +0000 UTC m=+150.210219501" watchObservedRunningTime="2025-12-05 01:12:54.872683616 +0000 UTC m=+150.212075915" Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.896156 4665 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:54 crc kubenswrapper[4665]: E1205 01:12:54.896861 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:55.396845592 +0000 UTC m=+150.736237891 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.911713 4665 patch_prober.go:28] interesting pod/router-default-5444994796-7wl87 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 01:12:54 crc kubenswrapper[4665]: [-]has-synced failed: reason withheld Dec 05 01:12:54 crc kubenswrapper[4665]: [+]process-running ok Dec 05 01:12:54 crc kubenswrapper[4665]: healthz check failed Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.911759 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-7wl87" podUID="191296ac-80a8-4b64-acf8-d0087ac08c79" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.941288 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-6swmr" event={"ID":"fb9e84d7-85d4-4d27-b238-cb79eebe9cd6","Type":"ContainerStarted","Data":"0da77b25f6acbd0e7835ce710dd6dc21ddcd2a5becfab9ec26e161f727e1dffd"} Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.976611 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-hkzlk" podStartSLOduration=131.976590426 podStartE2EDuration="2m11.976590426s" podCreationTimestamp="2025-12-05 01:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:54.960616629 +0000 UTC m=+150.300008928" watchObservedRunningTime="2025-12-05 01:12:54.976590426 +0000 UTC m=+150.315982725" Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.978668 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zq8h8" event={"ID":"90d5f8b3-804d-4f32-81b9-230301d5a834","Type":"ContainerStarted","Data":"faae92253fe896748d771b2fa1b50e3f03acbc535b1acd7371e3df69141b2b8a"} Dec 05 01:12:54 crc kubenswrapper[4665]: I1205 01:12:54.997662 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:54 crc kubenswrapper[4665]: E1205 01:12:54.997880 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:55.497865283 +0000 UTC m=+150.837257582 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.006229 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7qh5v" event={"ID":"6afac4e7-1280-46f1-9571-a8aaad37b32f","Type":"ContainerStarted","Data":"c3b68de00f6cc10e7522359125c98a9f435e137505bbec277c00a33d5504ea6d"} Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.007257 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:55 crc kubenswrapper[4665]: E1205 01:12:55.007660 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:55.50764768 +0000 UTC m=+150.847039979 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.020038 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-xpmr7"] Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.035706 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-qgtfk" event={"ID":"6ae00a40-d6ad-4d9d-9eca-8c09a9818801","Type":"ContainerStarted","Data":"368f589082a01d835f8c75ea1294a9207a1fce422b8fa27aa07eae8ee44395e8"} Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.035747 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-qgtfk" event={"ID":"6ae00a40-d6ad-4d9d-9eca-8c09a9818801","Type":"ContainerStarted","Data":"020daf29f54d066e5dad2452d0f473a5e27d0ad4fcdc20b7e85229892cb5c092"} Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.068224 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm" event={"ID":"1cbef097-04e8-4837-9fbb-f37d6b9dafb2","Type":"ContainerStarted","Data":"c7cbbefdf5849b3925ee58addb28b2ff341f7cdad8d34c53892e5113777ba466"} Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.090214 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-q7hzk" podStartSLOduration=132.090200612 podStartE2EDuration="2m12.090200612s" podCreationTimestamp="2025-12-05 01:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:55.089059854 +0000 UTC m=+150.428452153" watchObservedRunningTime="2025-12-05 01:12:55.090200612 +0000 UTC m=+150.429592911" Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.091168 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" event={"ID":"9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580","Type":"ContainerStarted","Data":"c9242d3d8e3e6b80420ce349c51306f8e43b54136c80ac3a06bd298be8456433"} Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.107948 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:55 crc kubenswrapper[4665]: E1205 01:12:55.108253 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:55.608230238 +0000 UTC m=+150.947622537 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.111697 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"6fa584358fc6b4fa7d2ebdad4b962367dbedfd15746e0503344273a33a619818"} Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.129723 4665 patch_prober.go:28] interesting pod/downloads-7954f5f757-hkzlk container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.129776 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-hkzlk" podUID="d808dde0-4fdc-4d21-a6f2-3c27f540018f" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.152469 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4wbqn"] Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.177898 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-g5zmc"] Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.178433 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-sjmw5" Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.202538 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-snslb"] Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.210093 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:55 crc kubenswrapper[4665]: E1205 01:12:55.214692 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:55.714533886 +0000 UTC m=+151.053926185 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.250452 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-sf489"] Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.265813 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-lx8ts"] Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.269329 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-42qzk" podStartSLOduration=131.269308945 podStartE2EDuration="2m11.269308945s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:55.260237845 +0000 UTC m=+150.599630134" watchObservedRunningTime="2025-12-05 01:12:55.269308945 +0000 UTC m=+150.608701244" Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.291111 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-6bhrf"] Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.315800 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:55 crc kubenswrapper[4665]: E1205 01:12:55.316051 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:55.816035838 +0000 UTC m=+151.155428137 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.316383 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:55 crc kubenswrapper[4665]: E1205 01:12:55.316619 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-12-05 01:12:55.816613322 +0000 UTC m=+151.156005621 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.421202 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:55 crc kubenswrapper[4665]: E1205 01:12:55.421574 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:55.921521556 +0000 UTC m=+151.260913855 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.421792 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:55 crc kubenswrapper[4665]: E1205 01:12:55.429392 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:55.929345886 +0000 UTC m=+151.268738185 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.460891 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-rgvgv" podStartSLOduration=132.46087184 podStartE2EDuration="2m12.46087184s" podCreationTimestamp="2025-12-05 01:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:55.44476672 +0000 UTC m=+150.784159019" watchObservedRunningTime="2025-12-05 01:12:55.46087184 +0000 UTC m=+150.800264139" Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.474866 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-794gc"] Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.494216 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-7wl87" podStartSLOduration=131.494198838 podStartE2EDuration="2m11.494198838s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:55.493505502 +0000 UTC m=+150.832897811" watchObservedRunningTime="2025-12-05 01:12:55.494198838 +0000 UTC m=+150.833591137" Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.507835 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-h4gnf"] Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.512656 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414940-rn5fx"] Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.522657 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:55 crc kubenswrapper[4665]: E1205 01:12:55.522917 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:56.022901455 +0000 UTC m=+151.362293764 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.557815 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-sjmw5" podStartSLOduration=132.557796661 podStartE2EDuration="2m12.557796661s" podCreationTimestamp="2025-12-05 01:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:55.540973443 +0000 UTC m=+150.880365742" watchObservedRunningTime="2025-12-05 01:12:55.557796661 +0000 UTC m=+150.897188960" Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.570559 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh"] Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.579652 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-wdjtm"] Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.625046 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:55 crc kubenswrapper[4665]: E1205 01:12:55.625463 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:56.125450451 +0000 UTC m=+151.464842750 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.702065 4665 patch_prober.go:28] interesting pod/apiserver-76f77b778f-vl66w container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 05 01:12:55 crc kubenswrapper[4665]: [+]log ok Dec 05 01:12:55 crc kubenswrapper[4665]: [+]etcd ok Dec 05 01:12:55 crc kubenswrapper[4665]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 05 01:12:55 crc kubenswrapper[4665]: [+]poststarthook/generic-apiserver-start-informers ok Dec 05 01:12:55 crc kubenswrapper[4665]: [+]poststarthook/max-in-flight-filter ok Dec 05 01:12:55 crc kubenswrapper[4665]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 05 01:12:55 crc kubenswrapper[4665]: [+]poststarthook/image.openshift.io-apiserver-caches ok Dec 05 01:12:55 crc kubenswrapper[4665]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 05 01:12:55 crc kubenswrapper[4665]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Dec 05 01:12:55 crc kubenswrapper[4665]: [+]poststarthook/project.openshift.io-projectcache ok Dec 05 01:12:55 crc kubenswrapper[4665]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 05 01:12:55 crc kubenswrapper[4665]: [+]poststarthook/openshift.io-startinformers ok Dec 05 01:12:55 crc kubenswrapper[4665]: [+]poststarthook/openshift.io-restmapperupdater ok Dec 05 01:12:55 crc kubenswrapper[4665]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 05 01:12:55 crc kubenswrapper[4665]: livez check failed Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.702428 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-vl66w" podUID="4d7c33e5-a79c-49f8-9d11-17fbe1879911" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.741672 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:55 crc kubenswrapper[4665]: E1205 01:12:55.742009 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:56.241991857 +0000 UTC m=+151.581384156 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.742564 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-5jczg"] Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.819493 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-k5gsl"] Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.843530 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:55 crc kubenswrapper[4665]: E1205 01:12:55.844141 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:56.344112864 +0000 UTC m=+151.683505163 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.864045 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-4m9lk"] Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.895753 4665 patch_prober.go:28] interesting pod/router-default-5444994796-7wl87 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 01:12:55 crc kubenswrapper[4665]: [-]has-synced failed: reason withheld Dec 05 01:12:55 crc kubenswrapper[4665]: [+]process-running ok Dec 05 01:12:55 crc kubenswrapper[4665]: healthz check failed Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.896082 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-7wl87" podUID="191296ac-80a8-4b64-acf8-d0087ac08c79" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.912694 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-6swmr" podStartSLOduration=6.912676717 podStartE2EDuration="6.912676717s" podCreationTimestamp="2025-12-05 01:12:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-05 01:12:55.911375525 +0000 UTC m=+151.250767824" watchObservedRunningTime="2025-12-05 01:12:55.912676717 +0000 UTC m=+151.252069016" Dec 05 01:12:55 crc kubenswrapper[4665]: I1205 01:12:55.944894 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:55 crc kubenswrapper[4665]: E1205 01:12:55.945246 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:56.445228826 +0000 UTC m=+151.784621125 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.048215 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:56 crc kubenswrapper[4665]: E1205 01:12:56.048536 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:56.548522941 +0000 UTC m=+151.887915240 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.115073 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-zq8h8" podStartSLOduration=132.115057615 podStartE2EDuration="2m12.115057615s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:56.1016567 +0000 UTC m=+151.441048999" watchObservedRunningTime="2025-12-05 01:12:56.115057615 +0000 UTC m=+151.454449914" Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.149582 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:56 crc kubenswrapper[4665]: E1205 01:12:56.149865 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:56.649848828 +0000 UTC m=+151.989241127 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.250512 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:56 crc kubenswrapper[4665]: E1205 01:12:56.250859 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:56.750849208 +0000 UTC m=+152.090241497 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.268041 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9bxhg"] Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.299245 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bdr9q" event={"ID":"2478a25b-a8b7-44b5-8204-5862f15fb53d","Type":"ContainerStarted","Data":"9381726f2806ccb8efe06559a4a78d52f899032c9733903d791901c6b8045c2b"} Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.349102 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-k5gsl" event={"ID":"f20fec7f-c7bb-4bb5-b86c-076b8931aa97","Type":"ContainerStarted","Data":"2a990ba2d84f8a8e06488fb52aa6c4a06599f55d5f816c48ca2fad19daebd12b"} Dec 05 01:12:56 crc kubenswrapper[4665]: W1205 01:12:56.350630 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcf3ba3f2_29d2_4a05_9a91_7e3ad7c20774.slice/crio-e3686e02ab16fa55c8ebcc167bf0ea6ca0b8f831d95cacb6baffa0e87ed1840b WatchSource:0}: Error finding container e3686e02ab16fa55c8ebcc167bf0ea6ca0b8f831d95cacb6baffa0e87ed1840b: Status 404 returned error can't find the container with id e3686e02ab16fa55c8ebcc167bf0ea6ca0b8f831d95cacb6baffa0e87ed1840b Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.351253 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:56 crc kubenswrapper[4665]: E1205 01:12:56.351799 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:56.851784596 +0000 UTC m=+152.191176895 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.358225 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" event={"ID":"29ef4f10-a6b6-4551-8067-0a82efc5651d","Type":"ContainerStarted","Data":"e0b75f55080154ed48257d2fd0948ef4064971098686ba87740ab7a4324715c3"} Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.364328 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm" podStartSLOduration=132.364281989 podStartE2EDuration="2m12.364281989s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:56.332998329 +0000 UTC m=+151.672390628" watchObservedRunningTime="2025-12-05 01:12:56.364281989 +0000 UTC m=+151.703674288" Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.384572 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h4gnf" event={"ID":"b9a9d28c-11ea-4fef-ac00-13eb549271ff","Type":"ContainerStarted","Data":"083b903a6983bc07daf504c84922223a45e7fc0faefebe550bd1e272cc6cdab8"} Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.387607 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh" event={"ID":"d82a4e9d-7232-4652-baec-7f0e395afd4b","Type":"ContainerStarted","Data":"164153df55074df615cf84f741741bd87abcc349019482771bc0aeae01a36848"} Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.403653 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" event={"ID":"9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580","Type":"ContainerStarted","Data":"540f8a0062548c6c18d5e58fb57f2a1122bf8ca515ffd25804e473bcd88da918"} Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.404673 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.409424 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-thm7j" podStartSLOduration=132.409414623 podStartE2EDuration="2m12.409414623s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:56.37383579 +0000 UTC m=+151.713228089" watchObservedRunningTime="2025-12-05 01:12:56.409414623 +0000 UTC m=+151.748806922" Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.411516 4665 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-tqfg7 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" 
start-of-body= Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.411653 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" podUID="9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.432555 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-4m9lk" event={"ID":"9c541bcd-d60a-4e73-8bcd-2d502eebbb9a","Type":"ContainerStarted","Data":"2a95e84f82e70c509647a6312dad3131ed8731245f5ac675d0c746bbe726b908"} Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.433991 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-7qh5v" podStartSLOduration=132.433976648 podStartE2EDuration="2m12.433976648s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:56.411179346 +0000 UTC m=+151.750571645" watchObservedRunningTime="2025-12-05 01:12:56.433976648 +0000 UTC m=+151.773368947" Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.457949 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:56 crc kubenswrapper[4665]: E1205 01:12:56.459068 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:56.959055186 +0000 UTC m=+152.298447485 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.461416 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g5zmc" event={"ID":"759b38bd-4ae5-40d9-ada0-c27a4d86dde8","Type":"ContainerStarted","Data":"6248bcd16dbbd89425a97436b7e44176c678cbb847ed4b5572c12c247ee65b34"} Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.461451 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g5zmc" event={"ID":"759b38bd-4ae5-40d9-ada0-c27a4d86dde8","Type":"ContainerStarted","Data":"05cfb099177706b19adb49339858b24b1ffee5322865c8184e224c3892e05525"} Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.479173 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-sf489" event={"ID":"4eb1d49b-1c1f-448c-888a-63d7966b8480","Type":"ContainerStarted","Data":"2b97ba09385ec2fb6e8f51003bea3f18e3aaffe264c6296760e8dfb7e354ed09"} Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.507995 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414940-rn5fx" event={"ID":"4ebede5a-b1cb-4059-b158-ce9a85aed080","Type":"ContainerStarted","Data":"b891aeac9e55d3be8f96eddf16ed31aaaa939b60963396de3a36538c24a7079a"} Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.510795 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5jczg" event={"ID":"f8f58b77-587d-4631-acf8-eac2c8a3ce4f","Type":"ContainerStarted","Data":"532428df2795b207a13b1b083bb84b7112c3158f1df4a931288364bec3e398e5"} Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.512382 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xpmr7" event={"ID":"2595c30a-ace8-4674-8691-4353a9e55c46","Type":"ContainerStarted","Data":"e22aa66f29bbb98c11aa01726996ff58c45da4a6555c333aa91825b443f2d350"} Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.516776 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-794gc" event={"ID":"55c71265-8dce-4127-ae57-1f0850d20a80","Type":"ContainerStarted","Data":"57ade58c533e45862b68cb2ab088935d8cd616b2f4d40d92082327350f595c5a"} Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.545985 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-lx8ts" event={"ID":"1ccf81f6-7188-4348-b61e-cb5f347e888a","Type":"ContainerStarted","Data":"5a9b8848eff5a0bc42f4176de9807c090123b32177ea087b5010c5a788162be8"} Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.555260 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-7lq4t" event={"ID":"e3424ab6-4a72-48f5-9fd2-72204d4e2a6b","Type":"ContainerStarted","Data":"bf80a3b1ee413f339a6b2cdc1469e2d9b4176f682bddbb13b1bf6a0a58034ffe"} Dec 05 01:12:56 crc 
Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.559042 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 01:12:56 crc kubenswrapper[4665]: E1205 01:12:56.560066 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:57.060051905 +0000 UTC m=+152.399444204 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.566144 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-qgtfk" event={"ID":"6ae00a40-d6ad-4d9d-9eca-8c09a9818801","Type":"ContainerStarted","Data":"261bd44c05f9c8b3dabc25b34defbe15ace70d464f73cbd47da913b548049ce4"}
Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.566748 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-qgtfk"
Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.568328 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x8jz5" event={"ID":"7bbd42a4-d665-4950-89b1-ff6c53ac0b60","Type":"ContainerStarted","Data":"5015717f52adfa44dd2fdc81996a05cd0bd41ab560ff8a56718f91f6378acb9c"}
Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.575751 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-snslb" event={"ID":"7d763d3d-4925-4c67-8828-873c9a8dc973","Type":"ContainerStarted","Data":"3ee7246541a54c374e4b635e4b7c536a517e14b48b4f064b726e037436db57e0"}
Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.576435 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-wdjtm" event={"ID":"945b1001-04d4-4312-ab2c-39bddf4851f2","Type":"ContainerStarted","Data":"2cab11f173525a361e54704db41e810e6449c25bc1a28abed49b9cfaab37fe4c"}
Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.578080 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4wbqn" event={"ID":"10397df3-a399-4278-8909-538e0b8c3c01","Type":"ContainerStarted","Data":"f0904cca8f30719845d8c2704bc3cbd204f770411d5320189073f048b5e37588"}
Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.585625 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-thm7j"
Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.660403 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d"
Dec 05 01:12:56 crc kubenswrapper[4665]: E1205 01:12:56.660707 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:57.160696176 +0000 UTC m=+152.500088465 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.701728 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-qgtfk" podStartSLOduration=7.701707812 podStartE2EDuration="7.701707812s" podCreationTimestamp="2025-12-05 01:12:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:56.700125852 +0000 UTC m=+152.039518151" watchObservedRunningTime="2025-12-05 01:12:56.701707812 +0000 UTC m=+152.041100111"
Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.719427 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-x8jz5" podStartSLOduration=132.71941156 podStartE2EDuration="2m12.71941156s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:56.716311925 +0000 UTC m=+152.055704214" watchObservedRunningTime="2025-12-05 01:12:56.71941156 +0000 UTC m=+152.058803859"
Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.731240 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4wbqn" podStartSLOduration=132.731224877 podStartE2EDuration="2m12.731224877s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:56.729569346 +0000 UTC m=+152.068961645" watchObservedRunningTime="2025-12-05 01:12:56.731224877 +0000 UTC m=+152.070617176"
Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.764588 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" podStartSLOduration=132.764569895 podStartE2EDuration="2m12.764569895s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:56.750441893 +0000 UTC m=+152.089834212" watchObservedRunningTime="2025-12-05 01:12:56.764569895 +0000 UTC m=+152.103962194"
Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.764988 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 01:12:56 crc kubenswrapper[4665]: E1205 01:12:56.765276 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:57.265261132 +0000 UTC m=+152.604653431 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.765630 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-7lq4t" podStartSLOduration=7.765624781 podStartE2EDuration="7.765624781s" podCreationTimestamp="2025-12-05 01:12:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:56.765552449 +0000 UTC m=+152.104944748" watchObservedRunningTime="2025-12-05 01:12:56.765624781 +0000 UTC m=+152.105017090"
Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.781983 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-bdr9q" podStartSLOduration=132.781967528 podStartE2EDuration="2m12.781967528s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:56.781226179 +0000 UTC m=+152.120618478" watchObservedRunningTime="2025-12-05 01:12:56.781967528 +0000 UTC m=+152.121359817"
Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.840418 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-lx8ts" podStartSLOduration=132.840402905 podStartE2EDuration="2m12.840402905s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:56.838410316 +0000 UTC m=+152.177802615" watchObservedRunningTime="2025-12-05 01:12:56.840402905 +0000 UTC m=+152.179795204"
Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.866181 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d"
Dec 05 01:12:56 crc kubenswrapper[4665]: E1205 01:12:56.866484 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:57.366471596 +0000 UTC m=+152.705863895 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.896437 4665 patch_prober.go:28] interesting pod/router-default-5444994796-7wl87 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 01:12:56 crc kubenswrapper[4665]: [-]has-synced failed: reason withheld
Dec 05 01:12:56 crc kubenswrapper[4665]: [+]process-running ok
Dec 05 01:12:56 crc kubenswrapper[4665]: healthz check failed
Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.896504 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-7wl87" podUID="191296ac-80a8-4b64-acf8-d0087ac08c79" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 01:12:56 crc kubenswrapper[4665]: I1205 01:12:56.968805 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 01:12:56 crc kubenswrapper[4665]: E1205 01:12:56.969225 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:57.469208088 +0000 UTC m=+152.808600387 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.078017 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d"
Dec 05 01:12:57 crc kubenswrapper[4665]: E1205 01:12:57.078602 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:57.578590841 +0000 UTC m=+152.917983140 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.180666 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 01:12:57 crc kubenswrapper[4665]: E1205 01:12:57.181071 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:57.681057866 +0000 UTC m=+153.020450165 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.285534 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d"
Dec 05 01:12:57 crc kubenswrapper[4665]: E1205 01:12:57.286107 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:57.786093433 +0000 UTC m=+153.125485732 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.387322 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 01:12:57 crc kubenswrapper[4665]: E1205 01:12:57.387571 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:57.887540382 +0000 UTC m=+153.226932681 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.387667 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d"
Dec 05 01:12:57 crc kubenswrapper[4665]: E1205 01:12:57.387938 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:57.887924123 +0000 UTC m=+153.227316422 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.489132 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 01:12:57 crc kubenswrapper[4665]: E1205 01:12:57.489442 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:57.989418693 +0000 UTC m=+153.328810992 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.583358 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414940-rn5fx" event={"ID":"4ebede5a-b1cb-4059-b158-ce9a85aed080","Type":"ContainerStarted","Data":"b60b82c009802106c1d1611ec6a894a7ac3c90c5fd92b97fad08d3a5a93f0cfa"}
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.584747 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-wdjtm" event={"ID":"945b1001-04d4-4312-ab2c-39bddf4851f2","Type":"ContainerStarted","Data":"dfe5a4b56d36c83298852d9b34c7b8056b4f933d5ba3bf111e748fb4037ffa2d"}
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.586096 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-4wbqn" event={"ID":"10397df3-a399-4278-8909-538e0b8c3c01","Type":"ContainerStarted","Data":"718e097a91dfa5ec716485b42bbf9c186c1d24d2e9c58f5ac53855b5efddaccf"}
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.588146 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-794gc" event={"ID":"55c71265-8dce-4127-ae57-1f0850d20a80","Type":"ContainerStarted","Data":"817757a360d8bef0c51199ec99d6c74452b03dc2f9a25125c62b85c342f23d68"}
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.588174 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-794gc" event={"ID":"55c71265-8dce-4127-ae57-1f0850d20a80","Type":"ContainerStarted","Data":"3d25547e67f9bb4c6627ce5adb137afb0e27285426aa57c29f4ba36f3b4b6661"}
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.588559 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-794gc"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.590009 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.605700 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g5zmc" event={"ID":"759b38bd-4ae5-40d9-ada0-c27a4d86dde8","Type":"ContainerStarted","Data":"0cfcb048297034bde05efe61f638e032b48e12f949ddd23341664932dcf4c47d"}
Dec 05 01:12:57 crc kubenswrapper[4665]: E1205 01:12:57.606698 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:58.106680307 +0000 UTC m=+153.446072606 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.614584 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-sf489" event={"ID":"4eb1d49b-1c1f-448c-888a-63d7966b8480","Type":"ContainerStarted","Data":"d34796d909e7523df488c8ae005bd17488284714a70ba46971fcd010743d674a"}
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.614640 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-sf489" event={"ID":"4eb1d49b-1c1f-448c-888a-63d7966b8480","Type":"ContainerStarted","Data":"b23cf2d4f74df1dc67f6ff70a14330e8566a305ac11bd8e5c01ec3803fe66da3"}
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.615796 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29414940-rn5fx" podStartSLOduration=134.615781288 podStartE2EDuration="2m14.615781288s" podCreationTimestamp="2025-12-05 01:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:57.610195342 +0000 UTC m=+152.949587631" watchObservedRunningTime="2025-12-05 01:12:57.615781288 +0000 UTC m=+152.955173587"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.619323 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-lx8ts" event={"ID":"1ccf81f6-7188-4348-b61e-cb5f347e888a","Type":"ContainerStarted","Data":"fb75f772bdbb58d608c30bdadd3595e3dc2aed02c214a368d475ab61cbd06ea2"}
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.625231 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-snslb" event={"ID":"7d763d3d-4925-4c67-8828-873c9a8dc973","Type":"ContainerStarted","Data":"71a24940cca08b69d9c814779f977d526205787de70c2638c3f2e45e5d0b5835"}
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.625276 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-snslb" event={"ID":"7d763d3d-4925-4c67-8828-873c9a8dc973","Type":"ContainerStarted","Data":"ccf9687bda0d821ab117ea03253fed8727c2241d16456575e13168e82ba1d0b3"}
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.626988 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xpmr7" event={"ID":"2595c30a-ace8-4674-8691-4353a9e55c46","Type":"ContainerStarted","Data":"7fb0b4c98e61a511fbd0e2e00ceddde38aef33f3bd354c381954970c00887ed2"}
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.627015 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xpmr7" event={"ID":"2595c30a-ace8-4674-8691-4353a9e55c46","Type":"ContainerStarted","Data":"683bbd412b1d16068c772a320b9fdc0c6ac0d68b98deff4a6eb0e8a4d4d8dd06"}
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.628263 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-k5gsl" event={"ID":"f20fec7f-c7bb-4bb5-b86c-076b8931aa97","Type":"ContainerStarted","Data":"649c8bf17a77885da6db31d690ed492cde5a84e5b4857734c87c5842dfc73dca"}
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.629615 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh" event={"ID":"d82a4e9d-7232-4652-baec-7f0e395afd4b","Type":"ContainerStarted","Data":"21d67ce3db23272d024063fe4f645040c6940d0b4d1b46326ebaf86e29399dc6"}
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.629898 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.631037 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9bxhg" event={"ID":"cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774","Type":"ContainerStarted","Data":"4cc6872dc32f749b254f0d0e067f42dcd9bb84e170b5f4bb45d3d9685a27546b"}
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.631062 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9bxhg" event={"ID":"cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774","Type":"ContainerStarted","Data":"e3686e02ab16fa55c8ebcc167bf0ea6ca0b8f831d95cacb6baffa0e87ed1840b"}
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.631571 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9bxhg"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.633124 4665 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-9bxhg container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body=
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.633170 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9bxhg" podUID="cf3ba3f2-29d2-4a05-9a91-7e3ad7c20774" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/healthz\": dial tcp 10.217.0.29:8443: connect: connection refused"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.634401 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-4m9lk" event={"ID":"9c541bcd-d60a-4e73-8bcd-2d502eebbb9a","Type":"ContainerStarted","Data":"4682d4ff14c12f3a4eff4ee5196c906bf6ad0d1796e3944909d17826fb3b834b"}
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.634578 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-4m9lk"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.635392 4665 patch_prober.go:28] interesting pod/console-operator-58897d9998-4m9lk container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/readyz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body=
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.635417 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-4m9lk" podUID="9c541bcd-d60a-4e73-8bcd-2d502eebbb9a" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.6:8443/readyz\": dial tcp 10.217.0.6:8443: connect: connection refused"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.635911 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" event={"ID":"29ef4f10-a6b6-4551-8067-0a82efc5651d","Type":"ContainerStarted","Data":"43ccccc0615602940255c212d6080f168f016910a0f52b442cc2ea5bd8b17d6f"}
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.636420 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.637167 4665 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-6bhrf container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.34:6443/healthz\": dial tcp 10.217.0.34:6443: connect: connection refused" start-of-body=
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.637189 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" podUID="29ef4f10-a6b6-4551-8067-0a82efc5651d" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.34:6443/healthz\": dial tcp 10.217.0.34:6443: connect: connection refused"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.638168 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h4gnf" event={"ID":"b9a9d28c-11ea-4fef-ac00-13eb549271ff","Type":"ContainerStarted","Data":"7bf8f7c33b9a0a767fb33e01e36c440d006ef4d59959c3ab1e8f42dd24e8532a"}
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.638211 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h4gnf" event={"ID":"b9a9d28c-11ea-4fef-ac00-13eb549271ff","Type":"ContainerStarted","Data":"bf8559836ac8aaeae395045420cc194cf22a735fbd7bc12f404238e6f44968ce"}
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.641383 4665 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-tqfg7 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body=
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.641422 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" podUID="9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.673567 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-wdjtm" podStartSLOduration=133.673536849 podStartE2EDuration="2m13.673536849s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:57.671523479 +0000 UTC m=+153.010915778" watchObservedRunningTime="2025-12-05 01:12:57.673536849 +0000 UTC m=+153.012929148"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.675882 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-794gc" podStartSLOduration=133.675875905 podStartE2EDuration="2m13.675875905s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:57.646185295 +0000 UTC m=+152.985577584" watchObservedRunningTime="2025-12-05 01:12:57.675875905 +0000 UTC m=+153.015268204"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.691049 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 01:12:57 crc kubenswrapper[4665]: E1205 01:12:57.692165 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:58.19213872 +0000 UTC m=+153.531531019 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.700500 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-g5zmc" podStartSLOduration=133.700487402 podStartE2EDuration="2m13.700487402s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:57.700077173 +0000 UTC m=+153.039469462" watchObservedRunningTime="2025-12-05 01:12:57.700487402 +0000 UTC m=+153.039879701"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.705411 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2b4bm"]
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.706343 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2b4bm"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.713337 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.736066 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2b4bm"]
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.762369 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" podStartSLOduration=133.762347962 podStartE2EDuration="2m13.762347962s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:57.76144892 +0000 UTC m=+153.100841219" watchObservedRunningTime="2025-12-05 01:12:57.762347962 +0000 UTC m=+153.101740261"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.792961 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxzjf\" (UniqueName: \"kubernetes.io/projected/cdf1b771-7028-4cbd-ae5f-23cdf3784ecd-kube-api-access-wxzjf\") pod \"certified-operators-2b4bm\" (UID: \"cdf1b771-7028-4cbd-ae5f-23cdf3784ecd\") " pod="openshift-marketplace/certified-operators-2b4bm"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.793048 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.793140 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cdf1b771-7028-4cbd-ae5f-23cdf3784ecd-utilities\") pod \"certified-operators-2b4bm\" (UID: \"cdf1b771-7028-4cbd-ae5f-23cdf3784ecd\") " pod="openshift-marketplace/certified-operators-2b4bm"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.793359 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cdf1b771-7028-4cbd-ae5f-23cdf3784ecd-catalog-content\") pod \"certified-operators-2b4bm\" (UID: \"cdf1b771-7028-4cbd-ae5f-23cdf3784ecd\") " pod="openshift-marketplace/certified-operators-2b4bm"
Dec 05 01:12:57 crc kubenswrapper[4665]: E1205 01:12:57.796511 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:58.29649902 +0000 UTC m=+153.635891319 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.830439 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-k5gsl" podStartSLOduration=133.830421283 podStartE2EDuration="2m13.830421283s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:57.789024019 +0000 UTC m=+153.128416328" watchObservedRunningTime="2025-12-05 01:12:57.830421283 +0000 UTC m=+153.169813582"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.857281 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-snslb" podStartSLOduration=133.857264514 podStartE2EDuration="2m13.857264514s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:57.831599982 +0000 UTC m=+153.170992281" watchObservedRunningTime="2025-12-05 01:12:57.857264514 +0000 UTC m=+153.196656813"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.857528 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-xpmr7" podStartSLOduration=133.85752393 podStartE2EDuration="2m13.85752393s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:57.856483465 +0000 UTC m=+153.195875764" watchObservedRunningTime="2025-12-05 01:12:57.85752393 +0000 UTC m=+153.196916229"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.894850 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.895321 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cdf1b771-7028-4cbd-ae5f-23cdf3784ecd-utilities\") pod \"certified-operators-2b4bm\" (UID: \"cdf1b771-7028-4cbd-ae5f-23cdf3784ecd\") " pod="openshift-marketplace/certified-operators-2b4bm"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.895372 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cdf1b771-7028-4cbd-ae5f-23cdf3784ecd-catalog-content\") pod \"certified-operators-2b4bm\" (UID: \"cdf1b771-7028-4cbd-ae5f-23cdf3784ecd\") " pod="openshift-marketplace/certified-operators-2b4bm"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.895426 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxzjf\" (UniqueName: \"kubernetes.io/projected/cdf1b771-7028-4cbd-ae5f-23cdf3784ecd-kube-api-access-wxzjf\") pod \"certified-operators-2b4bm\" (UID: \"cdf1b771-7028-4cbd-ae5f-23cdf3784ecd\") " pod="openshift-marketplace/certified-operators-2b4bm"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.896337 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cdf1b771-7028-4cbd-ae5f-23cdf3784ecd-utilities\") pod \"certified-operators-2b4bm\" (UID: \"cdf1b771-7028-4cbd-ae5f-23cdf3784ecd\") " pod="openshift-marketplace/certified-operators-2b4bm"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.896793 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cdf1b771-7028-4cbd-ae5f-23cdf3784ecd-catalog-content\") pod \"certified-operators-2b4bm\" (UID: \"cdf1b771-7028-4cbd-ae5f-23cdf3784ecd\") " pod="openshift-marketplace/certified-operators-2b4bm"
Dec 05 01:12:57 crc kubenswrapper[4665]: E1205 01:12:57.896864 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:58.396847794 +0000 UTC m=+153.736240093 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.899503 4665 patch_prober.go:28] interesting pod/router-default-5444994796-7wl87 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 01:12:57 crc kubenswrapper[4665]: [-]has-synced failed: reason withheld
Dec 05 01:12:57 crc kubenswrapper[4665]: [+]process-running ok
Dec 05 01:12:57 crc kubenswrapper[4665]: healthz check failed
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.899545 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-7wl87" podUID="191296ac-80a8-4b64-acf8-d0087ac08c79" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.901286 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-h4gnf" podStartSLOduration=133.901270231 podStartE2EDuration="2m13.901270231s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:57.894713153 +0000 UTC m=+153.234105452" watchObservedRunningTime="2025-12-05 01:12:57.901270231 +0000 UTC m=+153.240662530"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.920274 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-4tdbf"]
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.921152 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4tdbf"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.927898 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxzjf\" (UniqueName: \"kubernetes.io/projected/cdf1b771-7028-4cbd-ae5f-23cdf3784ecd-kube-api-access-wxzjf\") pod \"certified-operators-2b4bm\" (UID: \"cdf1b771-7028-4cbd-ae5f-23cdf3784ecd\") " pod="openshift-marketplace/certified-operators-2b4bm"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.932385 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.963735 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh" podStartSLOduration=133.963716436 podStartE2EDuration="2m13.963716436s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:57.939317493 +0000 UTC m=+153.278709792" watchObservedRunningTime="2025-12-05 01:12:57.963716436 +0000 UTC m=+153.303108735"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.964715 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4tdbf"]
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.996150 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.996201 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b37e5172-40b2-48e4-978e-cec68ac061e4-catalog-content\") pod \"community-operators-4tdbf\" (UID: \"b37e5172-40b2-48e4-978e-cec68ac061e4\") " pod="openshift-marketplace/community-operators-4tdbf"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.996251 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dwmv\" (UniqueName: \"kubernetes.io/projected/b37e5172-40b2-48e4-978e-cec68ac061e4-kube-api-access-8dwmv\") pod \"community-operators-4tdbf\" (UID: \"b37e5172-40b2-48e4-978e-cec68ac061e4\") " pod="openshift-marketplace/community-operators-4tdbf"
Dec 05 01:12:57 crc kubenswrapper[4665]: I1205 01:12:57.996271 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b37e5172-40b2-48e4-978e-cec68ac061e4-utilities\") pod \"community-operators-4tdbf\" (UID: \"b37e5172-40b2-48e4-978e-cec68ac061e4\") " pod="openshift-marketplace/community-operators-4tdbf"
Dec 05 01:12:57 crc kubenswrapper[4665]: E1205 01:12:57.996538 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:58.496526601 +0000 UTC m=+153.835918900 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.024123 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2b4bm"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.038872 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9bxhg" podStartSLOduration=134.038855268 podStartE2EDuration="2m14.038855268s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:58.036938401 +0000 UTC m=+153.376330700" watchObservedRunningTime="2025-12-05 01:12:58.038855268 +0000 UTC m=+153.378247567"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.039426 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-sf489" podStartSLOduration=134.039421801 podStartE2EDuration="2m14.039421801s" podCreationTimestamp="2025-12-05 01:10:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:58.003153282 +0000 UTC m=+153.342545581" watchObservedRunningTime="2025-12-05 01:12:58.039421801 +0000 UTC m=+153.378814100"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.084960 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-4m9lk" podStartSLOduration=135.084944355 podStartE2EDuration="2m15.084944355s" podCreationTimestamp="2025-12-05 01:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:12:58.068520527 +0000 UTC m=+153.407912826" watchObservedRunningTime="2025-12-05 01:12:58.084944355 +0000 UTC m=+153.424336654"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.097103 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.097485 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b37e5172-40b2-48e4-978e-cec68ac061e4-catalog-content\") pod \"community-operators-4tdbf\" (UID: \"b37e5172-40b2-48e4-978e-cec68ac061e4\") " pod="openshift-marketplace/community-operators-4tdbf"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.097536 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dwmv\" (UniqueName: \"kubernetes.io/projected/b37e5172-40b2-48e4-978e-cec68ac061e4-kube-api-access-8dwmv\") pod \"community-operators-4tdbf\" (UID: \"b37e5172-40b2-48e4-978e-cec68ac061e4\") " pod="openshift-marketplace/community-operators-4tdbf"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.097555 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b37e5172-40b2-48e4-978e-cec68ac061e4-utilities\") pod \"community-operators-4tdbf\" (UID: \"b37e5172-40b2-48e4-978e-cec68ac061e4\") " pod="openshift-marketplace/community-operators-4tdbf"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.097984 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b37e5172-40b2-48e4-978e-cec68ac061e4-utilities\") pod \"community-operators-4tdbf\" (UID: \"b37e5172-40b2-48e4-978e-cec68ac061e4\") " pod="openshift-marketplace/community-operators-4tdbf"
Dec 05 01:12:58 crc kubenswrapper[4665]: E1205 01:12:58.098113 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:58.598090504 +0000 UTC m=+153.937482803 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.098336 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b37e5172-40b2-48e4-978e-cec68ac061e4-catalog-content\") pod \"community-operators-4tdbf\" (UID: \"b37e5172-40b2-48e4-978e-cec68ac061e4\") " pod="openshift-marketplace/community-operators-4tdbf"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.118486 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-v24sw"]
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.119432 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v24sw"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.134123 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dwmv\" (UniqueName: \"kubernetes.io/projected/b37e5172-40b2-48e4-978e-cec68ac061e4-kube-api-access-8dwmv\") pod \"community-operators-4tdbf\" (UID: \"b37e5172-40b2-48e4-978e-cec68ac061e4\") " pod="openshift-marketplace/community-operators-4tdbf"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.169428 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-v24sw"]
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.199284 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.199368 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2494dbd6-0990-4ec2-9338-f0ef366f13f5-utilities\") pod \"certified-operators-v24sw\" (UID: \"2494dbd6-0990-4ec2-9338-f0ef366f13f5\") " pod="openshift-marketplace/certified-operators-v24sw"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.199410 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8bfw\" (UniqueName: \"kubernetes.io/projected/2494dbd6-0990-4ec2-9338-f0ef366f13f5-kube-api-access-x8bfw\") pod \"certified-operators-v24sw\" (UID: \"2494dbd6-0990-4ec2-9338-f0ef366f13f5\") " pod="openshift-marketplace/certified-operators-v24sw"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.199435 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2494dbd6-0990-4ec2-9338-f0ef366f13f5-catalog-content\") pod \"certified-operators-v24sw\" (UID: \"2494dbd6-0990-4ec2-9338-f0ef366f13f5\") " pod="openshift-marketplace/certified-operators-v24sw"
Dec 05 01:12:58 crc kubenswrapper[4665]: E1205 01:12:58.199749 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:58.699737429 +0000 UTC m=+154.039129728 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.268220 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4tdbf"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.301820 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.302010 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2494dbd6-0990-4ec2-9338-f0ef366f13f5-utilities\") pod \"certified-operators-v24sw\" (UID: \"2494dbd6-0990-4ec2-9338-f0ef366f13f5\") " pod="openshift-marketplace/certified-operators-v24sw"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.302082 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8bfw\" (UniqueName: \"kubernetes.io/projected/2494dbd6-0990-4ec2-9338-f0ef366f13f5-kube-api-access-x8bfw\") pod \"certified-operators-v24sw\" (UID: \"2494dbd6-0990-4ec2-9338-f0ef366f13f5\") " pod="openshift-marketplace/certified-operators-v24sw"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.302106 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2494dbd6-0990-4ec2-9338-f0ef366f13f5-catalog-content\") pod \"certified-operators-v24sw\" (UID: \"2494dbd6-0990-4ec2-9338-f0ef366f13f5\") " pod="openshift-marketplace/certified-operators-v24sw"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.302515 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2494dbd6-0990-4ec2-9338-f0ef366f13f5-catalog-content\") pod \"certified-operators-v24sw\" (UID: \"2494dbd6-0990-4ec2-9338-f0ef366f13f5\") " pod="openshift-marketplace/certified-operators-v24sw"
Dec 05 01:12:58 crc kubenswrapper[4665]: E1205 01:12:58.302585 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:58.802569893 +0000 UTC m=+154.141962182 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.302780 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2494dbd6-0990-4ec2-9338-f0ef366f13f5-utilities\") pod \"certified-operators-v24sw\" (UID: \"2494dbd6-0990-4ec2-9338-f0ef366f13f5\") " pod="openshift-marketplace/certified-operators-v24sw"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.338104 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8bfw\" (UniqueName: \"kubernetes.io/projected/2494dbd6-0990-4ec2-9338-f0ef366f13f5-kube-api-access-x8bfw\") pod \"certified-operators-v24sw\" (UID: \"2494dbd6-0990-4ec2-9338-f0ef366f13f5\") " pod="openshift-marketplace/certified-operators-v24sw"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.355314 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-w5xm7"]
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.356404 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-w5xm7"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.403351 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d"
Dec 05 01:12:58 crc kubenswrapper[4665]: E1205 01:12:58.403611 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:58.903599063 +0000 UTC m=+154.242991362 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.403733 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-w5xm7"]
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.484097 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v24sw"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.507231 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.507451 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58-utilities\") pod \"community-operators-w5xm7\" (UID: \"d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58\") " pod="openshift-marketplace/community-operators-w5xm7"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.507472 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6ndfn\" (UniqueName: \"kubernetes.io/projected/d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58-kube-api-access-6ndfn\") pod \"community-operators-w5xm7\" (UID: \"d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58\") " pod="openshift-marketplace/community-operators-w5xm7"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.507520 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58-catalog-content\") pod \"community-operators-w5xm7\" (UID: \"d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58\") " pod="openshift-marketplace/community-operators-w5xm7"
Dec 05 01:12:58 crc kubenswrapper[4665]: E1205 01:12:58.507672 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:59.007655146 +0000 UTC m=+154.347047445 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.609555 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58-utilities\") pod \"community-operators-w5xm7\" (UID: \"d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58\") " pod="openshift-marketplace/community-operators-w5xm7"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.609780 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6ndfn\" (UniqueName: \"kubernetes.io/projected/d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58-kube-api-access-6ndfn\") pod \"community-operators-w5xm7\" (UID: \"d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58\") " pod="openshift-marketplace/community-operators-w5xm7"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.609818 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58-catalog-content\") pod \"community-operators-w5xm7\" (UID: \"d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58\") " pod="openshift-marketplace/community-operators-w5xm7"
Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.609841 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d"
Dec 05 01:12:58 crc kubenswrapper[4665]: E1205 01:12:58.610356 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:59.110342977 +0000 UTC m=+154.449735276 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.610981 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58-utilities\") pod \"community-operators-w5xm7\" (UID: \"d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58\") " pod="openshift-marketplace/community-operators-w5xm7" Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.611325 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58-catalog-content\") pod \"community-operators-w5xm7\" (UID: \"d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58\") " pod="openshift-marketplace/community-operators-w5xm7" Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.630531 4665 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-55kmh container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.630598 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh" podUID="d82a4e9d-7232-4652-baec-7f0e395afd4b" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.38:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.662938 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6ndfn\" (UniqueName: \"kubernetes.io/projected/d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58-kube-api-access-6ndfn\") pod \"community-operators-w5xm7\" (UID: \"d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58\") " pod="openshift-marketplace/community-operators-w5xm7" Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.676142 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5jczg" event={"ID":"f8f58b77-587d-4631-acf8-eac2c8a3ce4f","Type":"ContainerStarted","Data":"6874e497383b27467f323c06b25ac366b7873018bfd9ef05a16e4c8034407dcf"} Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.678174 4665 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-tqfg7 container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.678209 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" podUID="9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" Dec 
05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.706931 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-9bxhg" Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.710040 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-w5xm7" Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.710888 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:58 crc kubenswrapper[4665]: E1205 01:12:58.711208 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:59.211194572 +0000 UTC m=+154.550586871 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.820334 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:58 crc kubenswrapper[4665]: E1205 01:12:58.825703 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:59.325678229 +0000 UTC m=+154.665070528 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.871132 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2b4bm"] Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.905161 4665 patch_prober.go:28] interesting pod/router-default-5444994796-7wl87 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 01:12:58 crc kubenswrapper[4665]: [-]has-synced failed: reason withheld Dec 05 01:12:58 crc kubenswrapper[4665]: [+]process-running ok Dec 05 01:12:58 crc kubenswrapper[4665]: healthz check failed Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.905208 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-7wl87" podUID="191296ac-80a8-4b64-acf8-d0087ac08c79" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 01:12:58 crc kubenswrapper[4665]: I1205 01:12:58.923615 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:58 crc kubenswrapper[4665]: E1205 01:12:58.923896 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:59.42388227 +0000 UTC m=+154.763274569 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.024862 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:59 crc kubenswrapper[4665]: E1205 01:12:59.025166 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:59.525155766 +0000 UTC m=+154.864548065 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.126465 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:59 crc kubenswrapper[4665]: E1205 01:12:59.127089 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:59.627072228 +0000 UTC m=+154.966464527 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.181846 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-4m9lk" Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.228939 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:59 crc kubenswrapper[4665]: E1205 01:12:59.229264 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:59.729251685 +0000 UTC m=+155.068643984 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.330691 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:59 crc kubenswrapper[4665]: E1205 01:12:59.331066 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:12:59.831042353 +0000 UTC m=+155.170434652 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.344087 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-4tdbf"] Dec 05 01:12:59 crc kubenswrapper[4665]: W1205 01:12:59.375616 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb37e5172_40b2_48e4_978e_cec68ac061e4.slice/crio-76e05a8a45f493dea40f57b0b61813030d0cf682d28f4739218903f1708bd02d WatchSource:0}: Error finding container 76e05a8a45f493dea40f57b0b61813030d0cf682d28f4739218903f1708bd02d: Status 404 returned error can't find the container with id 76e05a8a45f493dea40f57b0b61813030d0cf682d28f4739218903f1708bd02d Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.434477 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:59 crc kubenswrapper[4665]: E1205 01:12:59.434784 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:12:59.93477284 +0000 UTC m=+155.274165139 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.443398 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-vl66w" Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.466626 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-vl66w" Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.535820 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:59 crc kubenswrapper[4665]: E1205 01:12:59.537809 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:13:00.037790677 +0000 UTC m=+155.377182976 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.551113 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-xv889" Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.552375 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-xv889" Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.566155 4665 patch_prober.go:28] interesting pod/console-f9d7485db-xv889 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.566217 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-xv889" podUID="b88c79b6-4760-4509-bee0-06de439c6ac2" containerName="console" probeResult="failure" output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.638610 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-v24sw"] Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.647551 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:59 crc kubenswrapper[4665]: E1205 01:12:59.648808 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:13:00.148795389 +0000 UTC m=+155.488187688 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.677535 4665 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-55kmh container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.677593 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh" podUID="d82a4e9d-7232-4652-baec-7f0e395afd4b" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.38:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.677639 4665 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-6bhrf container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.34:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.677651 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" podUID="29ef4f10-a6b6-4551-8067-0a82efc5651d" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.34:6443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.729395 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v24sw" event={"ID":"2494dbd6-0990-4ec2-9338-f0ef366f13f5","Type":"ContainerStarted","Data":"5f7bb0aeed4763c93401f179efd53ab48fd2640a102fc1c0692ced0212c65681"} Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.745801 4665 generic.go:334] "Generic (PLEG): container finished" podID="cdf1b771-7028-4cbd-ae5f-23cdf3784ecd" containerID="6d3765fcaf6e1a9c5a3dd09ef162226d586567679a26c3766b9b8d4eefad520d" exitCode=0 Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.745896 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2b4bm" 
event={"ID":"cdf1b771-7028-4cbd-ae5f-23cdf3784ecd","Type":"ContainerDied","Data":"6d3765fcaf6e1a9c5a3dd09ef162226d586567679a26c3766b9b8d4eefad520d"} Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.745922 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2b4bm" event={"ID":"cdf1b771-7028-4cbd-ae5f-23cdf3784ecd","Type":"ContainerStarted","Data":"922548825f0afce7250641cf7bf615824e4e62bfc4ce607e444d90fc1a4d0ec9"} Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.748062 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:59 crc kubenswrapper[4665]: E1205 01:12:59.748422 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:13:00.248408665 +0000 UTC m=+155.587800964 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.749410 4665 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.785446 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tdbf" event={"ID":"b37e5172-40b2-48e4-978e-cec68ac061e4","Type":"ContainerStarted","Data":"76e05a8a45f493dea40f57b0b61813030d0cf682d28f4739218903f1708bd02d"} Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.798262 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5jczg" event={"ID":"f8f58b77-587d-4631-acf8-eac2c8a3ce4f","Type":"ContainerStarted","Data":"a8aa110d1d97d073544ab5148377efdc795c1b7fa4b47b2e509a9e30c5f40a89"} Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.814449 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm" Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.814480 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm" Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.828403 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.845581 4665 patch_prober.go:28] interesting pod/downloads-7954f5f757-hkzlk container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.845648 4665 prober.go:107] "Probe failed" 
probeType="Readiness" pod="openshift-console/downloads-7954f5f757-hkzlk" podUID="d808dde0-4fdc-4d21-a6f2-3c27f540018f" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.845733 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-w5xm7"] Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.846250 4665 patch_prober.go:28] interesting pod/downloads-7954f5f757-hkzlk container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" start-of-body= Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.846270 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-hkzlk" podUID="d808dde0-4fdc-4d21-a6f2-3c27f540018f" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.18:8080/\": dial tcp 10.217.0.18:8080: connect: connection refused" Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.849104 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:12:59 crc kubenswrapper[4665]: E1205 01:12:59.849483 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:13:00.349470436 +0000 UTC m=+155.688862735 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.863593 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm" Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.902912 4665 patch_prober.go:28] interesting pod/router-default-5444994796-7wl87 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 01:12:59 crc kubenswrapper[4665]: [-]has-synced failed: reason withheld Dec 05 01:12:59 crc kubenswrapper[4665]: [+]process-running ok Dec 05 01:12:59 crc kubenswrapper[4665]: healthz check failed Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.902980 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-7wl87" podUID="191296ac-80a8-4b64-acf8-d0087ac08c79" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.912465 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pcrmz"] Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.913492 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pcrmz" Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.920067 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.951367 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:12:59 crc kubenswrapper[4665]: E1205 01:12:59.952760 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:13:00.45274501 +0000 UTC m=+155.792137309 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:12:59 crc kubenswrapper[4665]: I1205 01:12:59.978570 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pcrmz"] Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.056959 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06fcff7e-da06-4d77-abbf-361c5c23f666-catalog-content\") pod \"redhat-marketplace-pcrmz\" (UID: \"06fcff7e-da06-4d77-abbf-361c5c23f666\") " pod="openshift-marketplace/redhat-marketplace-pcrmz" Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.057217 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-msp4n\" (UniqueName: \"kubernetes.io/projected/06fcff7e-da06-4d77-abbf-361c5c23f666-kube-api-access-msp4n\") pod \"redhat-marketplace-pcrmz\" (UID: \"06fcff7e-da06-4d77-abbf-361c5c23f666\") " pod="openshift-marketplace/redhat-marketplace-pcrmz" Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.057341 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06fcff7e-da06-4d77-abbf-361c5c23f666-utilities\") pod \"redhat-marketplace-pcrmz\" (UID: \"06fcff7e-da06-4d77-abbf-361c5c23f666\") " pod="openshift-marketplace/redhat-marketplace-pcrmz" Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.057451 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:13:00 crc kubenswrapper[4665]: E1205 01:13:00.057767 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:13:00.557756606 +0000 UTC m=+155.897148905 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.158705 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.159203 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-msp4n\" (UniqueName: \"kubernetes.io/projected/06fcff7e-da06-4d77-abbf-361c5c23f666-kube-api-access-msp4n\") pod \"redhat-marketplace-pcrmz\" (UID: \"06fcff7e-da06-4d77-abbf-361c5c23f666\") " pod="openshift-marketplace/redhat-marketplace-pcrmz" Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.159232 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06fcff7e-da06-4d77-abbf-361c5c23f666-utilities\") pod \"redhat-marketplace-pcrmz\" (UID: \"06fcff7e-da06-4d77-abbf-361c5c23f666\") " pod="openshift-marketplace/redhat-marketplace-pcrmz" Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.159289 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06fcff7e-da06-4d77-abbf-361c5c23f666-catalog-content\") pod \"redhat-marketplace-pcrmz\" (UID: \"06fcff7e-da06-4d77-abbf-361c5c23f666\") " pod="openshift-marketplace/redhat-marketplace-pcrmz" Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.159691 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06fcff7e-da06-4d77-abbf-361c5c23f666-catalog-content\") pod \"redhat-marketplace-pcrmz\" (UID: \"06fcff7e-da06-4d77-abbf-361c5c23f666\") " pod="openshift-marketplace/redhat-marketplace-pcrmz" Dec 05 01:13:00 crc kubenswrapper[4665]: E1205 01:13:00.159702 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:13:00.659688428 +0000 UTC m=+155.999080727 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.159897 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06fcff7e-da06-4d77-abbf-361c5c23f666-utilities\") pod \"redhat-marketplace-pcrmz\" (UID: \"06fcff7e-da06-4d77-abbf-361c5c23f666\") " pod="openshift-marketplace/redhat-marketplace-pcrmz" Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.195778 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-msp4n\" (UniqueName: \"kubernetes.io/projected/06fcff7e-da06-4d77-abbf-361c5c23f666-kube-api-access-msp4n\") pod \"redhat-marketplace-pcrmz\" (UID: \"06fcff7e-da06-4d77-abbf-361c5c23f666\") " pod="openshift-marketplace/redhat-marketplace-pcrmz" Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.245358 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pcrmz" Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.261068 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:13:00 crc kubenswrapper[4665]: E1205 01:13:00.261412 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:13:00.761400005 +0000 UTC m=+156.100792304 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.308590 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wjvw8"] Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.309603 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wjvw8" Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.340828 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wjvw8"] Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.365055 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:13:00 crc kubenswrapper[4665]: E1205 01:13:00.365539 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:13:00.86550309 +0000 UTC m=+156.204895449 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.466452 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gmldx\" (UniqueName: \"kubernetes.io/projected/8877b1c9-1d17-46eb-aa75-b0cd2b2c5424-kube-api-access-gmldx\") pod \"redhat-marketplace-wjvw8\" (UID: \"8877b1c9-1d17-46eb-aa75-b0cd2b2c5424\") " pod="openshift-marketplace/redhat-marketplace-wjvw8" Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.466793 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8877b1c9-1d17-46eb-aa75-b0cd2b2c5424-catalog-content\") pod \"redhat-marketplace-wjvw8\" (UID: \"8877b1c9-1d17-46eb-aa75-b0cd2b2c5424\") " pod="openshift-marketplace/redhat-marketplace-wjvw8" Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.466825 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.466898 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8877b1c9-1d17-46eb-aa75-b0cd2b2c5424-utilities\") pod \"redhat-marketplace-wjvw8\" (UID: \"8877b1c9-1d17-46eb-aa75-b0cd2b2c5424\") " pod="openshift-marketplace/redhat-marketplace-wjvw8" Dec 05 01:13:00 crc kubenswrapper[4665]: E1205 01:13:00.467159 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:13:00.967143875 +0000 UTC m=+156.306536174 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.568404 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.568628 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8877b1c9-1d17-46eb-aa75-b0cd2b2c5424-utilities\") pod \"redhat-marketplace-wjvw8\" (UID: \"8877b1c9-1d17-46eb-aa75-b0cd2b2c5424\") " pod="openshift-marketplace/redhat-marketplace-wjvw8" Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.568668 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gmldx\" (UniqueName: \"kubernetes.io/projected/8877b1c9-1d17-46eb-aa75-b0cd2b2c5424-kube-api-access-gmldx\") pod \"redhat-marketplace-wjvw8\" (UID: \"8877b1c9-1d17-46eb-aa75-b0cd2b2c5424\") " pod="openshift-marketplace/redhat-marketplace-wjvw8" Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.568688 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8877b1c9-1d17-46eb-aa75-b0cd2b2c5424-catalog-content\") pod \"redhat-marketplace-wjvw8\" (UID: \"8877b1c9-1d17-46eb-aa75-b0cd2b2c5424\") " pod="openshift-marketplace/redhat-marketplace-wjvw8" Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.569345 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8877b1c9-1d17-46eb-aa75-b0cd2b2c5424-catalog-content\") pod \"redhat-marketplace-wjvw8\" (UID: \"8877b1c9-1d17-46eb-aa75-b0cd2b2c5424\") " pod="openshift-marketplace/redhat-marketplace-wjvw8" Dec 05 01:13:00 crc kubenswrapper[4665]: E1205 01:13:00.569413 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:13:01.069398624 +0000 UTC m=+156.408790913 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.569602 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8877b1c9-1d17-46eb-aa75-b0cd2b2c5424-utilities\") pod \"redhat-marketplace-wjvw8\" (UID: \"8877b1c9-1d17-46eb-aa75-b0cd2b2c5424\") " pod="openshift-marketplace/redhat-marketplace-wjvw8"
Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.634244 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gmldx\" (UniqueName: \"kubernetes.io/projected/8877b1c9-1d17-46eb-aa75-b0cd2b2c5424-kube-api-access-gmldx\") pod \"redhat-marketplace-wjvw8\" (UID: \"8877b1c9-1d17-46eb-aa75-b0cd2b2c5424\") " pod="openshift-marketplace/redhat-marketplace-wjvw8"
Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.640682 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wjvw8"
Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.673798 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d"
Dec 05 01:13:00 crc kubenswrapper[4665]: E1205 01:13:00.674131 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:13:01.174118384 +0000 UTC m=+156.513510683 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.775877 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 01:13:00 crc kubenswrapper[4665]: E1205 01:13:00.776039 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:13:01.276014904 +0000 UTC m=+156.615407203 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.776470 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d"
Dec 05 01:13:00 crc kubenswrapper[4665]: E1205 01:13:00.776798 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:13:01.276785634 +0000 UTC m=+156.616177933 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.815630 4665 generic.go:334] "Generic (PLEG): container finished" podID="4ebede5a-b1cb-4059-b158-ce9a85aed080" containerID="b60b82c009802106c1d1611ec6a894a7ac3c90c5fd92b97fad08d3a5a93f0cfa" exitCode=0
Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.815726 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414940-rn5fx" event={"ID":"4ebede5a-b1cb-4059-b158-ce9a85aed080","Type":"ContainerDied","Data":"b60b82c009802106c1d1611ec6a894a7ac3c90c5fd92b97fad08d3a5a93f0cfa"}
Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.819881 4665 generic.go:334] "Generic (PLEG): container finished" podID="b37e5172-40b2-48e4-978e-cec68ac061e4" containerID="a39a6f15cb10e181ae7e37ecfc8d688759591517d937ad425613ce792f8d0b03" exitCode=0
Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.820287 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tdbf" event={"ID":"b37e5172-40b2-48e4-978e-cec68ac061e4","Type":"ContainerDied","Data":"a39a6f15cb10e181ae7e37ecfc8d688759591517d937ad425613ce792f8d0b03"}
Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.835401 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5jczg" event={"ID":"f8f58b77-587d-4631-acf8-eac2c8a3ce4f","Type":"ContainerStarted","Data":"297cc21d016467e1a978757c70ce133fe3d7753a7f87b5067714f7f1fd474e47"}
Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.844899 4665 generic.go:334] "Generic (PLEG): container finished" podID="d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58" containerID="093c4ab4e6f7f7db9770994d07da2fae1ee5c4d1d4d76193b0b4ca6fce373b52" exitCode=0
Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.844960 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w5xm7" event={"ID":"d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58","Type":"ContainerDied","Data":"093c4ab4e6f7f7db9770994d07da2fae1ee5c4d1d4d76193b0b4ca6fce373b52"}
Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.844982 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w5xm7" event={"ID":"d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58","Type":"ContainerStarted","Data":"b4526ed8e0fee10d731a656bb92db0b14bfcf3b7e7c0d9a54f3df3250c518f76"}
Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.858502 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v24sw" event={"ID":"2494dbd6-0990-4ec2-9338-f0ef366f13f5","Type":"ContainerDied","Data":"879b9912bc24b6363df29eddc5c0f4043ccb698ac7783325aec8e030ed00d8ff"}
Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.859413 4665 generic.go:334] "Generic (PLEG): container finished" podID="2494dbd6-0990-4ec2-9338-f0ef366f13f5" containerID="879b9912bc24b6363df29eddc5c0f4043ccb698ac7783325aec8e030ed00d8ff" exitCode=0
Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.871637 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-d4sxm"
Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.877399 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 01:13:00 crc kubenswrapper[4665]: E1205 01:13:00.907052 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:13:01.407025092 +0000 UTC m=+156.746417391 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.917846 4665 patch_prober.go:28] interesting pod/router-default-5444994796-7wl87 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 01:13:00 crc kubenswrapper[4665]: [-]has-synced failed: reason withheld
Dec 05 01:13:00 crc kubenswrapper[4665]: [+]process-running ok
Dec 05 01:13:00 crc kubenswrapper[4665]: healthz check failed
Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.917924 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-7wl87" podUID="191296ac-80a8-4b64-acf8-d0087ac08c79" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.948006 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6zz57"]
Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.949041 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6zz57"]
Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.949131 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6zz57"
Dec 05 01:13:00 crc kubenswrapper[4665]: I1205 01:13:00.955539 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.009419 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d"
Dec 05 01:13:01 crc kubenswrapper[4665]: E1205 01:13:01.011314 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:13:01.511283571 +0000 UTC m=+156.850675870 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.099666 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-mdh6k"]
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.101255 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mdh6k"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.112063 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.112307 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9lxw\" (UniqueName: \"kubernetes.io/projected/a5720325-40b1-49f2-a8a2-39dc7aef289a-kube-api-access-z9lxw\") pod \"redhat-operators-6zz57\" (UID: \"a5720325-40b1-49f2-a8a2-39dc7aef289a\") " pod="openshift-marketplace/redhat-operators-6zz57"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.112345 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5720325-40b1-49f2-a8a2-39dc7aef289a-utilities\") pod \"redhat-operators-6zz57\" (UID: \"a5720325-40b1-49f2-a8a2-39dc7aef289a\") " pod="openshift-marketplace/redhat-operators-6zz57"
Dec 05 01:13:01 crc kubenswrapper[4665]: E1205 01:13:01.112434 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:13:01.612408383 +0000 UTC m=+156.951800682 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.112542 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5720325-40b1-49f2-a8a2-39dc7aef289a-catalog-content\") pod \"redhat-operators-6zz57\" (UID: \"a5720325-40b1-49f2-a8a2-39dc7aef289a\") " pod="openshift-marketplace/redhat-operators-6zz57"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.112606 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d"
Dec 05 01:13:01 crc kubenswrapper[4665]: E1205 01:13:01.112977 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:13:01.612970626 +0000 UTC m=+156.952362915 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.126093 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mdh6k"]
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.214908 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.215089 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5720325-40b1-49f2-a8a2-39dc7aef289a-utilities\") pod \"redhat-operators-6zz57\" (UID: \"a5720325-40b1-49f2-a8a2-39dc7aef289a\") " pod="openshift-marketplace/redhat-operators-6zz57"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.215124 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnlfp\" (UniqueName: \"kubernetes.io/projected/e87a1b03-5914-4f5a-92f8-ddf1b18864d4-kube-api-access-tnlfp\") pod \"redhat-operators-mdh6k\" (UID: \"e87a1b03-5914-4f5a-92f8-ddf1b18864d4\") " pod="openshift-marketplace/redhat-operators-mdh6k"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.215143 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5720325-40b1-49f2-a8a2-39dc7aef289a-catalog-content\") pod \"redhat-operators-6zz57\" (UID: \"a5720325-40b1-49f2-a8a2-39dc7aef289a\") " pod="openshift-marketplace/redhat-operators-6zz57"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.215190 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e87a1b03-5914-4f5a-92f8-ddf1b18864d4-catalog-content\") pod \"redhat-operators-mdh6k\" (UID: \"e87a1b03-5914-4f5a-92f8-ddf1b18864d4\") " pod="openshift-marketplace/redhat-operators-mdh6k"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.215225 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e87a1b03-5914-4f5a-92f8-ddf1b18864d4-utilities\") pod \"redhat-operators-mdh6k\" (UID: \"e87a1b03-5914-4f5a-92f8-ddf1b18864d4\") " pod="openshift-marketplace/redhat-operators-mdh6k"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.215244 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9lxw\" (UniqueName: \"kubernetes.io/projected/a5720325-40b1-49f2-a8a2-39dc7aef289a-kube-api-access-z9lxw\") pod \"redhat-operators-6zz57\" (UID: \"a5720325-40b1-49f2-a8a2-39dc7aef289a\") " pod="openshift-marketplace/redhat-operators-6zz57"
Dec 05 01:13:01 crc kubenswrapper[4665]: E1205 01:13:01.215581 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:13:01.715558994 +0000 UTC m=+157.054951293 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.215902 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5720325-40b1-49f2-a8a2-39dc7aef289a-utilities\") pod \"redhat-operators-6zz57\" (UID: \"a5720325-40b1-49f2-a8a2-39dc7aef289a\") " pod="openshift-marketplace/redhat-operators-6zz57"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.216113 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5720325-40b1-49f2-a8a2-39dc7aef289a-catalog-content\") pod \"redhat-operators-6zz57\" (UID: \"a5720325-40b1-49f2-a8a2-39dc7aef289a\") " pod="openshift-marketplace/redhat-operators-6zz57"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.265036 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9lxw\" (UniqueName: \"kubernetes.io/projected/a5720325-40b1-49f2-a8a2-39dc7aef289a-kube-api-access-z9lxw\") pod \"redhat-operators-6zz57\" (UID: \"a5720325-40b1-49f2-a8a2-39dc7aef289a\") " pod="openshift-marketplace/redhat-operators-6zz57"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.288480 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6zz57"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.322165 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e87a1b03-5914-4f5a-92f8-ddf1b18864d4-catalog-content\") pod \"redhat-operators-mdh6k\" (UID: \"e87a1b03-5914-4f5a-92f8-ddf1b18864d4\") " pod="openshift-marketplace/redhat-operators-mdh6k"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.322993 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e87a1b03-5914-4f5a-92f8-ddf1b18864d4-catalog-content\") pod \"redhat-operators-mdh6k\" (UID: \"e87a1b03-5914-4f5a-92f8-ddf1b18864d4\") " pod="openshift-marketplace/redhat-operators-mdh6k"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.323143 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e87a1b03-5914-4f5a-92f8-ddf1b18864d4-utilities\") pod \"redhat-operators-mdh6k\" (UID: \"e87a1b03-5914-4f5a-92f8-ddf1b18864d4\") " pod="openshift-marketplace/redhat-operators-mdh6k"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.323203 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnlfp\" (UniqueName: \"kubernetes.io/projected/e87a1b03-5914-4f5a-92f8-ddf1b18864d4-kube-api-access-tnlfp\") pod \"redhat-operators-mdh6k\" (UID: \"e87a1b03-5914-4f5a-92f8-ddf1b18864d4\") " pod="openshift-marketplace/redhat-operators-mdh6k"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.323259 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d"
Dec 05 01:13:01 crc kubenswrapper[4665]: E1205 01:13:01.323580 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:13:01.823564874 +0000 UTC m=+157.162957173 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.323591 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e87a1b03-5914-4f5a-92f8-ddf1b18864d4-utilities\") pod \"redhat-operators-mdh6k\" (UID: \"e87a1b03-5914-4f5a-92f8-ddf1b18864d4\") " pod="openshift-marketplace/redhat-operators-mdh6k"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.343571 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pcrmz"]
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.371829 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnlfp\" (UniqueName: \"kubernetes.io/projected/e87a1b03-5914-4f5a-92f8-ddf1b18864d4-kube-api-access-tnlfp\") pod \"redhat-operators-mdh6k\" (UID: \"e87a1b03-5914-4f5a-92f8-ddf1b18864d4\") " pod="openshift-marketplace/redhat-operators-mdh6k"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.429175 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 01:13:01 crc kubenswrapper[4665]: E1205 01:13:01.429479 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:13:01.929462481 +0000 UTC m=+157.268854780 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.447231 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mdh6k"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.450211 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wjvw8"]
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.477152 4665 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Dec 05 01:13:01 crc kubenswrapper[4665]: W1205 01:13:01.489509 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8877b1c9_1d17_46eb_aa75_b0cd2b2c5424.slice/crio-b5973115dbdbe61e9ed3a588bbb88665c0887193998f6b86ff98114c196b09d9 WatchSource:0}: Error finding container b5973115dbdbe61e9ed3a588bbb88665c0887193998f6b86ff98114c196b09d9: Status 404 returned error can't find the container with id b5973115dbdbe61e9ed3a588bbb88665c0887193998f6b86ff98114c196b09d9
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.530330 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d"
Dec 05 01:13:01 crc kubenswrapper[4665]: E1205 01:13:01.530711 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:13:02.030693816 +0000 UTC m=+157.370086115 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.633202 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 01:13:01 crc kubenswrapper[4665]: E1205 01:13:01.633566 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:13:02.13355112 +0000 UTC m=+157.472943419 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.666898 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.667872 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.673947 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.679172 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.689320 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.734924 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c557081-0907-4168-860c-a31f99c9fc79-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8c557081-0907-4168-860c-a31f99c9fc79\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.735137 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8c557081-0907-4168-860c-a31f99c9fc79-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8c557081-0907-4168-860c-a31f99c9fc79\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.735221 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d"
Dec 05 01:13:01 crc kubenswrapper[4665]: E1205 01:13:01.735542 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:13:02.235530474 +0000 UTC m=+157.574922773 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.836176 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.836437 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c557081-0907-4168-860c-a31f99c9fc79-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8c557081-0907-4168-860c-a31f99c9fc79\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.836468 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8c557081-0907-4168-860c-a31f99c9fc79-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8c557081-0907-4168-860c-a31f99c9fc79\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.836587 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8c557081-0907-4168-860c-a31f99c9fc79-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8c557081-0907-4168-860c-a31f99c9fc79\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 01:13:01 crc kubenswrapper[4665]: E1205 01:13:01.836652 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:13:02.336636965 +0000 UTC m=+157.676029264 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.881649 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c557081-0907-4168-860c-a31f99c9fc79-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8c557081-0907-4168-860c-a31f99c9fc79\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.890693 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-7wl87"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.895095 4665 patch_prober.go:28] interesting pod/router-default-5444994796-7wl87 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 01:13:01 crc kubenswrapper[4665]: [-]has-synced failed: reason withheld
Dec 05 01:13:01 crc kubenswrapper[4665]: [+]process-running ok
Dec 05 01:13:01 crc kubenswrapper[4665]: healthz check failed
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.895150 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-7wl87" podUID="191296ac-80a8-4b64-acf8-d0087ac08c79" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.906523 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-5jczg" event={"ID":"f8f58b77-587d-4631-acf8-eac2c8a3ce4f","Type":"ContainerStarted","Data":"aace283097c807abefc2c05aa2811126e7b062854c999c4a20a9440ec15ef173"}
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.927451 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wjvw8" event={"ID":"8877b1c9-1d17-46eb-aa75-b0cd2b2c5424","Type":"ContainerStarted","Data":"b5973115dbdbe61e9ed3a588bbb88665c0887193998f6b86ff98114c196b09d9"}
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.937768 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d"
Dec 05 01:13:01 crc kubenswrapper[4665]: E1205 01:13:01.939266 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 01:13:02.439251293 +0000 UTC m=+157.778643592 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-xbw2d" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.947547 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-5jczg" podStartSLOduration=13.947529564 podStartE2EDuration="13.947529564s" podCreationTimestamp="2025-12-05 01:12:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:13:01.936999289 +0000 UTC m=+157.276391589" watchObservedRunningTime="2025-12-05 01:13:01.947529564 +0000 UTC m=+157.286921863"
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.955670 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcrmz" event={"ID":"06fcff7e-da06-4d77-abbf-361c5c23f666","Type":"ContainerDied","Data":"d6c237fb62fde4e580aaeb150f1f1ebbd873178e93efbd15aa592e0179c9edd9"}
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.955499 4665 generic.go:334] "Generic (PLEG): container finished" podID="06fcff7e-da06-4d77-abbf-361c5c23f666" containerID="d6c237fb62fde4e580aaeb150f1f1ebbd873178e93efbd15aa592e0179c9edd9" exitCode=0
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.956573 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcrmz" event={"ID":"06fcff7e-da06-4d77-abbf-361c5c23f666","Type":"ContainerStarted","Data":"b1bea4597696dd357ce2af50e66bffc3932b69fdc86030db48ec5747f1a23ae2"}
Dec 05 01:13:01 crc kubenswrapper[4665]: I1205 01:13:01.987107 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7"
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.006325 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.036364 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6zz57"]
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.039338 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 01:13:02 crc kubenswrapper[4665]: E1205 01:13:02.040326 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 01:13:02.540291714 +0000 UTC m=+157.879684013 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.113608 4665 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-05T01:13:01.477176678Z","Handler":null,"Name":""}
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.123347 4665 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.123708 4665 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.153117 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d"
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.223568 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-55kmh"
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.250920 4665 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.250991 4665 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d"
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.341965 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-mdh6k"]
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.416185 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-xbw2d\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d"
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.475816 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.533265 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.562337 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414940-rn5fx"
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.679073 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4pmfl\" (UniqueName: \"kubernetes.io/projected/4ebede5a-b1cb-4059-b158-ce9a85aed080-kube-api-access-4pmfl\") pod \"4ebede5a-b1cb-4059-b158-ce9a85aed080\" (UID: \"4ebede5a-b1cb-4059-b158-ce9a85aed080\") "
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.679239 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4ebede5a-b1cb-4059-b158-ce9a85aed080-secret-volume\") pod \"4ebede5a-b1cb-4059-b158-ce9a85aed080\" (UID: \"4ebede5a-b1cb-4059-b158-ce9a85aed080\") "
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.679326 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4ebede5a-b1cb-4059-b158-ce9a85aed080-config-volume\") pod \"4ebede5a-b1cb-4059-b158-ce9a85aed080\" (UID: \"4ebede5a-b1cb-4059-b158-ce9a85aed080\") "
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.680335 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ebede5a-b1cb-4059-b158-ce9a85aed080-config-volume" (OuterVolumeSpecName: "config-volume") pod "4ebede5a-b1cb-4059-b158-ce9a85aed080" (UID: "4ebede5a-b1cb-4059-b158-ce9a85aed080"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.685155 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ebede5a-b1cb-4059-b158-ce9a85aed080-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4ebede5a-b1cb-4059-b158-ce9a85aed080" (UID: "4ebede5a-b1cb-4059-b158-ce9a85aed080"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.685479 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ebede5a-b1cb-4059-b158-ce9a85aed080-kube-api-access-4pmfl" (OuterVolumeSpecName: "kube-api-access-4pmfl") pod "4ebede5a-b1cb-4059-b158-ce9a85aed080" (UID: "4ebede5a-b1cb-4059-b158-ce9a85aed080"). InnerVolumeSpecName "kube-api-access-4pmfl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.717565 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d"
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.781845 4665 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4ebede5a-b1cb-4059-b158-ce9a85aed080-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.781868 4665 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4ebede5a-b1cb-4059-b158-ce9a85aed080-config-volume\") on node \"crc\" DevicePath \"\""
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.781878 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4pmfl\" (UniqueName: \"kubernetes.io/projected/4ebede5a-b1cb-4059-b158-ce9a85aed080-kube-api-access-4pmfl\") on node \"crc\" DevicePath \"\""
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.793096 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Dec 05 01:13:02 crc kubenswrapper[4665]: W1205 01:13:02.850464 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod8c557081_0907_4168_860c_a31f99c9fc79.slice/crio-6fb16f7ff7a896406211ac2712bd461f6424e047fd03138295b5b5048cb4e07c WatchSource:0}: Error finding container 6fb16f7ff7a896406211ac2712bd461f6424e047fd03138295b5b5048cb4e07c: Status 404 returned error can't find the container with id 6fb16f7ff7a896406211ac2712bd461f6424e047fd03138295b5b5048cb4e07c
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.893580 4665 patch_prober.go:28] interesting pod/router-default-5444994796-7wl87 container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 01:13:02 crc kubenswrapper[4665]: [-]has-synced failed: reason withheld
Dec 05 01:13:02 crc kubenswrapper[4665]: [+]process-running ok
Dec 05 01:13:02 crc kubenswrapper[4665]: healthz check failed
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.893630 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-7wl87" podUID="191296ac-80a8-4b64-acf8-d0087ac08c79" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 01:13:02 crc kubenswrapper[4665]: I1205 01:13:02.925005 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes"
Dec 05 01:13:03 crc kubenswrapper[4665]: I1205 01:13:03.009454 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414940-rn5fx" event={"ID":"4ebede5a-b1cb-4059-b158-ce9a85aed080","Type":"ContainerDied","Data":"b891aeac9e55d3be8f96eddf16ed31aaaa939b60963396de3a36538c24a7079a"}
Dec 05 01:13:03 crc kubenswrapper[4665]: I1205 01:13:03.009529 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b891aeac9e55d3be8f96eddf16ed31aaaa939b60963396de3a36538c24a7079a"
Dec 05 01:13:03 crc kubenswrapper[4665]: I1205 01:13:03.009624 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414940-rn5fx"
Dec 05 01:13:03 crc kubenswrapper[4665]: I1205 01:13:03.022747 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"8c557081-0907-4168-860c-a31f99c9fc79","Type":"ContainerStarted","Data":"6fb16f7ff7a896406211ac2712bd461f6424e047fd03138295b5b5048cb4e07c"}
Dec 05 01:13:03 crc kubenswrapper[4665]: I1205 01:13:03.027674 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6zz57" event={"ID":"a5720325-40b1-49f2-a8a2-39dc7aef289a","Type":"ContainerStarted","Data":"fa932e1a96bd2ff5799f6dfc85730bdbb2c59eb410f1dde1320bd44186a14e35"}
Dec 05 01:13:03 crc kubenswrapper[4665]: I1205 01:13:03.065249 4665 generic.go:334] "Generic (PLEG): container finished" podID="8877b1c9-1d17-46eb-aa75-b0cd2b2c5424" containerID="0d1183c14223ecb2812e6e8dc3db5b220b6fab893b53e68526461e7055a969ad" exitCode=0
Dec 05 01:13:03 crc kubenswrapper[4665]: I1205 01:13:03.065910 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wjvw8" event={"ID":"8877b1c9-1d17-46eb-aa75-b0cd2b2c5424","Type":"ContainerDied","Data":"0d1183c14223ecb2812e6e8dc3db5b220b6fab893b53e68526461e7055a969ad"}
Dec 05 01:13:03 crc kubenswrapper[4665]: I1205 01:13:03.089882 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mdh6k" event={"ID":"e87a1b03-5914-4f5a-92f8-ddf1b18864d4","Type":"ContainerStarted","Data":"85fb6dd3797af3e35d1d3fbef0c1443add9f6a2cfabdb0f9ba422dd2c2433cfb"}
Dec 05 01:13:03 crc kubenswrapper[4665]: I1205 01:13:03.469294 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xbw2d"]
Dec 05 01:13:03 crc kubenswrapper[4665]: I1205 01:13:03.902966 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-7wl87"
Dec 05 01:13:03 crc kubenswrapper[4665]: I1205 01:13:03.907878 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-7wl87"
Dec 05 01:13:03 crc kubenswrapper[4665]: I1205 01:13:03.936797 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 05 01:13:03 crc kubenswrapper[4665]: E1205 01:13:03.936997 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ebede5a-b1cb-4059-b158-ce9a85aed080" containerName="collect-profiles"
Dec 05 01:13:03 crc kubenswrapper[4665]: I1205 01:13:03.937009 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ebede5a-b1cb-4059-b158-ce9a85aed080" containerName="collect-profiles"
Dec 05 01:13:03 crc kubenswrapper[4665]: I1205 01:13:03.937112 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ebede5a-b1cb-4059-b158-ce9a85aed080" containerName="collect-profiles"
Dec 05 01:13:03 crc kubenswrapper[4665]: I1205 01:13:03.937465 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 01:13:03 crc kubenswrapper[4665]: I1205 01:13:03.940115 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Dec 05 01:13:03 crc kubenswrapper[4665]: I1205 01:13:03.940263 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Dec 05 01:13:03 crc kubenswrapper[4665]: I1205 01:13:03.956776 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 05 01:13:04 crc kubenswrapper[4665]: I1205 01:13:04.010577 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/797f2377-ec44-47b8-a29e-012c1d22f024-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"797f2377-ec44-47b8-a29e-012c1d22f024\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 01:13:04 crc kubenswrapper[4665]: I1205 01:13:04.010716 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/797f2377-ec44-47b8-a29e-012c1d22f024-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"797f2377-ec44-47b8-a29e-012c1d22f024\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 01:13:04 crc kubenswrapper[4665]: I1205 01:13:04.121715 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/797f2377-ec44-47b8-a29e-012c1d22f024-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"797f2377-ec44-47b8-a29e-012c1d22f024\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 01:13:04 crc kubenswrapper[4665]: I1205 01:13:04.121770 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/797f2377-ec44-47b8-a29e-012c1d22f024-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"797f2377-ec44-47b8-a29e-012c1d22f024\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 01:13:04 crc kubenswrapper[4665]: I1205 01:13:04.122071 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/797f2377-ec44-47b8-a29e-012c1d22f024-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"797f2377-ec44-47b8-a29e-012c1d22f024\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 01:13:04 crc kubenswrapper[4665]: I1205 01:13:04.129786 4665 generic.go:334] "Generic (PLEG): container finished" podID="e87a1b03-5914-4f5a-92f8-ddf1b18864d4" containerID="907fed7440161b0b03b0fe55413f3a18f4ab0e14a569fc7135c1efb45211f360" exitCode=0
Dec 05 01:13:04 crc kubenswrapper[4665]: I1205 01:13:04.129870 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mdh6k" event={"ID":"e87a1b03-5914-4f5a-92f8-ddf1b18864d4","Type":"ContainerDied","Data":"907fed7440161b0b03b0fe55413f3a18f4ab0e14a569fc7135c1efb45211f360"}
Dec 05 01:13:04 crc kubenswrapper[4665]: I1205 01:13:04.147244 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" event={"ID":"bef0c5a0-6fef-4199-8782-99bc78b35374","Type":"ContainerStarted","Data":"87fd3ea6d516b42da7703c27d1a32b6eaf99fb5087debd5dacf473163766dde7"}
Dec 05 01:13:04 crc kubenswrapper[4665]: I1205 01:13:04.149122 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"8c557081-0907-4168-860c-a31f99c9fc79","Type":"ContainerStarted","Data":"fcb89ba8d605cecc0d046224a2e6f45beba59be4302a73b3c2dbd93cbd4f4f35"}
Dec 05 01:13:04 crc kubenswrapper[4665]: I1205 01:13:04.177032 4665 generic.go:334] "Generic (PLEG): container finished" podID="a5720325-40b1-49f2-a8a2-39dc7aef289a" containerID="20d0e4edd86a98e76b4a6cdaabc818cc45a774e03c870d685ec2857a5c401a56" exitCode=0
Dec 05 01:13:04 crc kubenswrapper[4665]: I1205 01:13:04.177107 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/797f2377-ec44-47b8-a29e-012c1d22f024-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"797f2377-ec44-47b8-a29e-012c1d22f024\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 01:13:04 crc kubenswrapper[4665]: I1205 01:13:04.177215 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6zz57" event={"ID":"a5720325-40b1-49f2-a8a2-39dc7aef289a","Type":"ContainerDied","Data":"20d0e4edd86a98e76b4a6cdaabc818cc45a774e03c870d685ec2857a5c401a56"}
Dec 05 01:13:04 crc kubenswrapper[4665]: I1205 01:13:04.268605 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 01:13:05 crc kubenswrapper[4665]: I1205 01:13:05.049778 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Dec 05 01:13:05 crc kubenswrapper[4665]: I1205 01:13:05.195367 4665 generic.go:334] "Generic (PLEG): container finished" podID="8c557081-0907-4168-860c-a31f99c9fc79" containerID="fcb89ba8d605cecc0d046224a2e6f45beba59be4302a73b3c2dbd93cbd4f4f35" exitCode=0
Dec 05 01:13:05 crc kubenswrapper[4665]: I1205 01:13:05.195423 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"8c557081-0907-4168-860c-a31f99c9fc79","Type":"ContainerDied","Data":"fcb89ba8d605cecc0d046224a2e6f45beba59be4302a73b3c2dbd93cbd4f4f35"}
Dec 05 01:13:05 crc kubenswrapper[4665]: I1205 01:13:05.198887 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"797f2377-ec44-47b8-a29e-012c1d22f024","Type":"ContainerStarted","Data":"2f97752895bb6d93545d7b02eae1c028e9de217b993aade6915293a74b2a4135"}
Dec 05 01:13:05 crc kubenswrapper[4665]: I1205 01:13:05.225559 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" event={"ID":"bef0c5a0-6fef-4199-8782-99bc78b35374","Type":"ContainerStarted","Data":"00fa475f62cc93092b18ad1797b5910e224a7dabfefea707b747c452169442ab"}
Dec 05 01:13:05 crc kubenswrapper[4665]: I1205 01:13:05.226507 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d"
Dec 05 01:13:05 crc kubenswrapper[4665]: I1205 01:13:05.242707 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" podStartSLOduration=142.242690754 podStartE2EDuration="2m22.242690754s" podCreationTimestamp="2025-12-05 01:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:13:05.241334451 +0000 UTC m=+160.580726760" watchObservedRunningTime="2025-12-05 01:13:05.242690754 +0000 UTC m=+160.582083053"
Dec 05 01:13:05 crc kubenswrapper[4665]: I1205 01:13:05.949872 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs\") pod \"network-metrics-daemon-xhbdk\" (UID: \"e1e639aa-4bf7-4baa-a332-62dffec786d8\") " pod="openshift-multus/network-metrics-daemon-xhbdk"
Dec 05 01:13:05 crc kubenswrapper[4665]: I1205 01:13:05.972686 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/e1e639aa-4bf7-4baa-a332-62dffec786d8-metrics-certs\") pod \"network-metrics-daemon-xhbdk\" (UID: \"e1e639aa-4bf7-4baa-a332-62dffec786d8\") " pod="openshift-multus/network-metrics-daemon-xhbdk"
Dec 05 01:13:06 crc kubenswrapper[4665]: I1205 01:13:06.111446 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-xhbdk"
Dec 05 01:13:06 crc kubenswrapper[4665]: I1205 01:13:06.256457 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"797f2377-ec44-47b8-a29e-012c1d22f024","Type":"ContainerStarted","Data":"174115031a200460f3662a57b8f5a6af5896f9c5f0ec2517aeb7c8dbba3fdb28"}
Dec 05 01:13:06 crc kubenswrapper[4665]: I1205 01:13:06.284466 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.284446337 podStartE2EDuration="3.284446337s" podCreationTimestamp="2025-12-05 01:13:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:13:06.271977304 +0000 UTC m=+161.611369603" watchObservedRunningTime="2025-12-05 01:13:06.284446337 +0000 UTC m=+161.623838636"
Dec 05 01:13:06 crc kubenswrapper[4665]: I1205 01:13:06.696836 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 01:13:06 crc kubenswrapper[4665]: I1205 01:13:06.785176 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8c557081-0907-4168-860c-a31f99c9fc79-kubelet-dir\") pod \"8c557081-0907-4168-860c-a31f99c9fc79\" (UID: \"8c557081-0907-4168-860c-a31f99c9fc79\") "
Dec 05 01:13:06 crc kubenswrapper[4665]: I1205 01:13:06.785279 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c557081-0907-4168-860c-a31f99c9fc79-kube-api-access\") pod \"8c557081-0907-4168-860c-a31f99c9fc79\" (UID: \"8c557081-0907-4168-860c-a31f99c9fc79\") "
Dec 05 01:13:06 crc kubenswrapper[4665]: I1205 01:13:06.786184 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8c557081-0907-4168-860c-a31f99c9fc79-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "8c557081-0907-4168-860c-a31f99c9fc79" (UID: "8c557081-0907-4168-860c-a31f99c9fc79"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 01:13:06 crc kubenswrapper[4665]: I1205 01:13:06.801457 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8c557081-0907-4168-860c-a31f99c9fc79-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "8c557081-0907-4168-860c-a31f99c9fc79" (UID: "8c557081-0907-4168-860c-a31f99c9fc79"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:13:06 crc kubenswrapper[4665]: I1205 01:13:06.821288 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-xhbdk"]
Dec 05 01:13:06 crc kubenswrapper[4665]: I1205 01:13:06.886061 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8c557081-0907-4168-860c-a31f99c9fc79-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 05 01:13:06 crc kubenswrapper[4665]: I1205 01:13:06.886096 4665 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8c557081-0907-4168-860c-a31f99c9fc79-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 05 01:13:06 crc kubenswrapper[4665]: I1205 01:13:06.977880 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-qgtfk"
Dec 05 01:13:07 crc kubenswrapper[4665]: I1205 01:13:07.285404 4665 generic.go:334] "Generic (PLEG): container finished" podID="797f2377-ec44-47b8-a29e-012c1d22f024" containerID="174115031a200460f3662a57b8f5a6af5896f9c5f0ec2517aeb7c8dbba3fdb28" exitCode=0
Dec 05 01:13:07 crc kubenswrapper[4665]: I1205 01:13:07.285513 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"797f2377-ec44-47b8-a29e-012c1d22f024","Type":"ContainerDied","Data":"174115031a200460f3662a57b8f5a6af5896f9c5f0ec2517aeb7c8dbba3fdb28"}
Dec 05 01:13:07 crc kubenswrapper[4665]: I1205 01:13:07.290719 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-xhbdk" event={"ID":"e1e639aa-4bf7-4baa-a332-62dffec786d8","Type":"ContainerStarted","Data":"9588e1db43170e727f844ae58161db3730673a69c84c3b275fcd6149b25d7ba5"}
Dec 05 01:13:07 crc kubenswrapper[4665]: I1205 01:13:07.296695 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"8c557081-0907-4168-860c-a31f99c9fc79","Type":"ContainerDied","Data":"6fb16f7ff7a896406211ac2712bd461f6424e047fd03138295b5b5048cb4e07c"}
Dec 05 01:13:07 crc kubenswrapper[4665]: I1205 01:13:07.296726 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6fb16f7ff7a896406211ac2712bd461f6424e047fd03138295b5b5048cb4e07c"
Dec 05 01:13:07 crc kubenswrapper[4665]: I1205 01:13:07.296768 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Dec 05 01:13:08 crc kubenswrapper[4665]: I1205 01:13:08.311214 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-xhbdk" event={"ID":"e1e639aa-4bf7-4baa-a332-62dffec786d8","Type":"ContainerStarted","Data":"56d5d8ae5cb9e19b51f4164b38184cd110e2c783e6db4be53caa66b6c226b749"}
Dec 05 01:13:08 crc kubenswrapper[4665]: I1205 01:13:08.689649 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Dec 05 01:13:08 crc kubenswrapper[4665]: I1205 01:13:08.728253 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/797f2377-ec44-47b8-a29e-012c1d22f024-kube-api-access\") pod \"797f2377-ec44-47b8-a29e-012c1d22f024\" (UID: \"797f2377-ec44-47b8-a29e-012c1d22f024\") "
Dec 05 01:13:08 crc kubenswrapper[4665]: I1205 01:13:08.728481 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/797f2377-ec44-47b8-a29e-012c1d22f024-kubelet-dir\") pod \"797f2377-ec44-47b8-a29e-012c1d22f024\" (UID: \"797f2377-ec44-47b8-a29e-012c1d22f024\") "
Dec 05 01:13:08 crc kubenswrapper[4665]: I1205 01:13:08.728672 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/797f2377-ec44-47b8-a29e-012c1d22f024-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "797f2377-ec44-47b8-a29e-012c1d22f024" (UID: "797f2377-ec44-47b8-a29e-012c1d22f024"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 01:13:08 crc kubenswrapper[4665]: I1205 01:13:08.750274 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/797f2377-ec44-47b8-a29e-012c1d22f024-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "797f2377-ec44-47b8-a29e-012c1d22f024" (UID: "797f2377-ec44-47b8-a29e-012c1d22f024"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:13:08 crc kubenswrapper[4665]: I1205 01:13:08.830178 4665 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/797f2377-ec44-47b8-a29e-012c1d22f024-kubelet-dir\") on node \"crc\" DevicePath \"\""
Dec 05 01:13:08 crc kubenswrapper[4665]: I1205 01:13:08.830208 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/797f2377-ec44-47b8-a29e-012c1d22f024-kube-api-access\") on node \"crc\" DevicePath \"\""
Dec 05 01:13:09 crc kubenswrapper[4665]: I1205 01:13:09.326288 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-xhbdk" event={"ID":"e1e639aa-4bf7-4baa-a332-62dffec786d8","Type":"ContainerStarted","Data":"befba8edb9d7ed1f2844b6be5512d8e93c7d61b5dcbf46a15a68dadba73dbb75"}
Dec 05 01:13:09 crc kubenswrapper[4665]: I1205 01:13:09.328423 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"797f2377-ec44-47b8-a29e-012c1d22f024","Type":"ContainerDied","Data":"2f97752895bb6d93545d7b02eae1c028e9de217b993aade6915293a74b2a4135"}
Dec 05 01:13:09 crc kubenswrapper[4665]: I1205 01:13:09.328456 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2f97752895bb6d93545d7b02eae1c028e9de217b993aade6915293a74b2a4135"
Dec 05 01:13:09 crc kubenswrapper[4665]: I1205 01:13:09.328479 4665 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 01:13:09 crc kubenswrapper[4665]: I1205 01:13:09.551993 4665 patch_prober.go:28] interesting pod/console-f9d7485db-xv889 container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" start-of-body= Dec 05 01:13:09 crc kubenswrapper[4665]: I1205 01:13:09.553627 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-xv889" podUID="b88c79b6-4760-4509-bee0-06de439c6ac2" containerName="console" probeResult="failure" output="Get \"https://10.217.0.12:8443/health\": dial tcp 10.217.0.12:8443: connect: connection refused" Dec 05 01:13:09 crc kubenswrapper[4665]: I1205 01:13:09.849452 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-hkzlk" Dec 05 01:13:10 crc kubenswrapper[4665]: I1205 01:13:10.352472 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-xhbdk" podStartSLOduration=147.352455127 podStartE2EDuration="2m27.352455127s" podCreationTimestamp="2025-12-05 01:10:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:13:10.351988436 +0000 UTC m=+165.691380735" watchObservedRunningTime="2025-12-05 01:13:10.352455127 +0000 UTC m=+165.691847426" Dec 05 01:13:14 crc kubenswrapper[4665]: I1205 01:13:14.922606 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:13:14 crc kubenswrapper[4665]: I1205 01:13:14.922891 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:13:19 crc kubenswrapper[4665]: I1205 01:13:19.558538 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-xv889" Dec 05 01:13:19 crc kubenswrapper[4665]: I1205 01:13:19.567619 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-xv889" Dec 05 01:13:22 crc kubenswrapper[4665]: I1205 01:13:22.729537 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:13:31 crc kubenswrapper[4665]: I1205 01:13:31.137276 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 01:13:32 crc kubenswrapper[4665]: I1205 01:13:32.166675 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-794gc" Dec 05 01:13:34 crc kubenswrapper[4665]: E1205 01:13:34.331779 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" 
image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 05 01:13:34 crc kubenswrapper[4665]: E1205 01:13:34.332225 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6ndfn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-w5xm7_openshift-marketplace(d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 01:13:34 crc kubenswrapper[4665]: E1205 01:13:34.333478 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-w5xm7" podUID="d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58" Dec 05 01:13:37 crc kubenswrapper[4665]: I1205 01:13:37.540227 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 01:13:37 crc kubenswrapper[4665]: E1205 01:13:37.540832 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8c557081-0907-4168-860c-a31f99c9fc79" containerName="pruner" Dec 05 01:13:37 crc kubenswrapper[4665]: I1205 01:13:37.540847 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="8c557081-0907-4168-860c-a31f99c9fc79" containerName="pruner" Dec 05 01:13:37 crc kubenswrapper[4665]: E1205 01:13:37.540859 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="797f2377-ec44-47b8-a29e-012c1d22f024" containerName="pruner" Dec 05 01:13:37 crc kubenswrapper[4665]: I1205 01:13:37.540865 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="797f2377-ec44-47b8-a29e-012c1d22f024" containerName="pruner" Dec 05 01:13:37 crc kubenswrapper[4665]: I1205 01:13:37.540977 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="797f2377-ec44-47b8-a29e-012c1d22f024" containerName="pruner" Dec 05 01:13:37 crc 
kubenswrapper[4665]: I1205 01:13:37.540985 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="8c557081-0907-4168-860c-a31f99c9fc79" containerName="pruner" Dec 05 01:13:37 crc kubenswrapper[4665]: I1205 01:13:37.541441 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 01:13:37 crc kubenswrapper[4665]: I1205 01:13:37.547499 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 05 01:13:37 crc kubenswrapper[4665]: I1205 01:13:37.547719 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 05 01:13:37 crc kubenswrapper[4665]: I1205 01:13:37.554339 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 01:13:37 crc kubenswrapper[4665]: I1205 01:13:37.649183 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ffc75658-8f25-4845-9cd6-1fdeaf8f0a00-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ffc75658-8f25-4845-9cd6-1fdeaf8f0a00\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 01:13:37 crc kubenswrapper[4665]: I1205 01:13:37.649316 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ffc75658-8f25-4845-9cd6-1fdeaf8f0a00-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ffc75658-8f25-4845-9cd6-1fdeaf8f0a00\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 01:13:37 crc kubenswrapper[4665]: I1205 01:13:37.750950 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ffc75658-8f25-4845-9cd6-1fdeaf8f0a00-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ffc75658-8f25-4845-9cd6-1fdeaf8f0a00\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 01:13:37 crc kubenswrapper[4665]: I1205 01:13:37.751266 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ffc75658-8f25-4845-9cd6-1fdeaf8f0a00-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ffc75658-8f25-4845-9cd6-1fdeaf8f0a00\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 01:13:37 crc kubenswrapper[4665]: I1205 01:13:37.751118 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ffc75658-8f25-4845-9cd6-1fdeaf8f0a00-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"ffc75658-8f25-4845-9cd6-1fdeaf8f0a00\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 01:13:37 crc kubenswrapper[4665]: I1205 01:13:37.773711 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ffc75658-8f25-4845-9cd6-1fdeaf8f0a00-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"ffc75658-8f25-4845-9cd6-1fdeaf8f0a00\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 01:13:37 crc kubenswrapper[4665]: I1205 01:13:37.870183 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 01:13:39 crc kubenswrapper[4665]: E1205 01:13:39.204240 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-w5xm7" podUID="d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58" Dec 05 01:13:39 crc kubenswrapper[4665]: E1205 01:13:39.280964 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 05 01:13:39 crc kubenswrapper[4665]: E1205 01:13:39.281277 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tnlfp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-mdh6k_openshift-marketplace(e87a1b03-5914-4f5a-92f8-ddf1b18864d4): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 01:13:39 crc kubenswrapper[4665]: E1205 01:13:39.282495 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-mdh6k" podUID="e87a1b03-5914-4f5a-92f8-ddf1b18864d4" Dec 05 01:13:39 crc kubenswrapper[4665]: E1205 01:13:39.293055 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 05 01:13:39 crc kubenswrapper[4665]: E1205 01:13:39.293223 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8dwmv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-4tdbf_openshift-marketplace(b37e5172-40b2-48e4-978e-cec68ac061e4): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 01:13:39 crc kubenswrapper[4665]: E1205 01:13:39.294441 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-4tdbf" podUID="b37e5172-40b2-48e4-978e-cec68ac061e4" Dec 05 01:13:41 crc kubenswrapper[4665]: E1205 01:13:41.045701 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-mdh6k" podUID="e87a1b03-5914-4f5a-92f8-ddf1b18864d4" Dec 05 01:13:41 crc kubenswrapper[4665]: E1205 01:13:41.046592 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-4tdbf" podUID="b37e5172-40b2-48e4-978e-cec68ac061e4" Dec 05 01:13:41 crc kubenswrapper[4665]: E1205 01:13:41.115153 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 05 01:13:41 crc kubenswrapper[4665]: E1205 01:13:41.115279 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x8bfw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-v24sw_openshift-marketplace(2494dbd6-0990-4ec2-9338-f0ef366f13f5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 01:13:41 crc kubenswrapper[4665]: E1205 01:13:41.116800 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-v24sw" podUID="2494dbd6-0990-4ec2-9338-f0ef366f13f5" Dec 05 01:13:42 crc kubenswrapper[4665]: E1205 01:13:42.199952 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-v24sw" podUID="2494dbd6-0990-4ec2-9338-f0ef366f13f5" Dec 05 01:13:42 crc kubenswrapper[4665]: E1205 01:13:42.274078 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 05 01:13:42 crc kubenswrapper[4665]: E1205 01:13:42.274227 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-msp4n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-pcrmz_openshift-marketplace(06fcff7e-da06-4d77-abbf-361c5c23f666): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 01:13:42 crc kubenswrapper[4665]: E1205 01:13:42.275527 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-pcrmz" podUID="06fcff7e-da06-4d77-abbf-361c5c23f666" Dec 05 01:13:42 crc kubenswrapper[4665]: E1205 01:13:42.314600 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 05 01:13:42 crc kubenswrapper[4665]: E1205 01:13:42.314737 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-gmldx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-wjvw8_openshift-marketplace(8877b1c9-1d17-46eb-aa75-b0cd2b2c5424): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 01:13:42 crc kubenswrapper[4665]: E1205 01:13:42.315939 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-wjvw8" podUID="8877b1c9-1d17-46eb-aa75-b0cd2b2c5424" Dec 05 01:13:42 crc kubenswrapper[4665]: E1205 01:13:42.317125 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 05 01:13:42 crc kubenswrapper[4665]: E1205 01:13:42.317287 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-z9lxw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-6zz57_openshift-marketplace(a5720325-40b1-49f2-a8a2-39dc7aef289a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 01:13:42 crc kubenswrapper[4665]: E1205 01:13:42.319090 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-6zz57" podUID="a5720325-40b1-49f2-a8a2-39dc7aef289a" Dec 05 01:13:42 crc kubenswrapper[4665]: E1205 01:13:42.320340 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 05 01:13:42 crc kubenswrapper[4665]: E1205 01:13:42.320441 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wxzjf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-2b4bm_openshift-marketplace(cdf1b771-7028-4cbd-ae5f-23cdf3784ecd): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 01:13:42 crc kubenswrapper[4665]: E1205 01:13:42.321943 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-2b4bm" podUID="cdf1b771-7028-4cbd-ae5f-23cdf3784ecd" Dec 05 01:13:42 crc kubenswrapper[4665]: E1205 01:13:42.522788 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-6zz57" podUID="a5720325-40b1-49f2-a8a2-39dc7aef289a" Dec 05 01:13:42 crc kubenswrapper[4665]: E1205 01:13:42.522970 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-wjvw8" podUID="8877b1c9-1d17-46eb-aa75-b0cd2b2c5424" Dec 05 01:13:42 crc kubenswrapper[4665]: E1205 01:13:42.523089 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-2b4bm" podUID="cdf1b771-7028-4cbd-ae5f-23cdf3784ecd" Dec 05 01:13:42 crc kubenswrapper[4665]: E1205 01:13:42.522909 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-pcrmz" podUID="06fcff7e-da06-4d77-abbf-361c5c23f666" Dec 05 01:13:42 
crc kubenswrapper[4665]: I1205 01:13:42.611026 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 01:13:42 crc kubenswrapper[4665]: I1205 01:13:42.726672 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 01:13:42 crc kubenswrapper[4665]: I1205 01:13:42.730025 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 01:13:42 crc kubenswrapper[4665]: I1205 01:13:42.744570 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 01:13:42 crc kubenswrapper[4665]: I1205 01:13:42.819565 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/30d7d777-4450-4236-85fe-fe6924c43319-var-lock\") pod \"installer-9-crc\" (UID: \"30d7d777-4450-4236-85fe-fe6924c43319\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 01:13:42 crc kubenswrapper[4665]: I1205 01:13:42.819650 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/30d7d777-4450-4236-85fe-fe6924c43319-kubelet-dir\") pod \"installer-9-crc\" (UID: \"30d7d777-4450-4236-85fe-fe6924c43319\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 01:13:42 crc kubenswrapper[4665]: I1205 01:13:42.819695 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/30d7d777-4450-4236-85fe-fe6924c43319-kube-api-access\") pod \"installer-9-crc\" (UID: \"30d7d777-4450-4236-85fe-fe6924c43319\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 01:13:42 crc kubenswrapper[4665]: I1205 01:13:42.920615 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/30d7d777-4450-4236-85fe-fe6924c43319-var-lock\") pod \"installer-9-crc\" (UID: \"30d7d777-4450-4236-85fe-fe6924c43319\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 01:13:42 crc kubenswrapper[4665]: I1205 01:13:42.920668 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/30d7d777-4450-4236-85fe-fe6924c43319-kubelet-dir\") pod \"installer-9-crc\" (UID: \"30d7d777-4450-4236-85fe-fe6924c43319\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 01:13:42 crc kubenswrapper[4665]: I1205 01:13:42.920702 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/30d7d777-4450-4236-85fe-fe6924c43319-kube-api-access\") pod \"installer-9-crc\" (UID: \"30d7d777-4450-4236-85fe-fe6924c43319\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 01:13:42 crc kubenswrapper[4665]: I1205 01:13:42.920777 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/30d7d777-4450-4236-85fe-fe6924c43319-kubelet-dir\") pod \"installer-9-crc\" (UID: \"30d7d777-4450-4236-85fe-fe6924c43319\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 01:13:42 crc kubenswrapper[4665]: I1205 01:13:42.920777 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: 
\"kubernetes.io/host-path/30d7d777-4450-4236-85fe-fe6924c43319-var-lock\") pod \"installer-9-crc\" (UID: \"30d7d777-4450-4236-85fe-fe6924c43319\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 01:13:42 crc kubenswrapper[4665]: I1205 01:13:42.939254 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/30d7d777-4450-4236-85fe-fe6924c43319-kube-api-access\") pod \"installer-9-crc\" (UID: \"30d7d777-4450-4236-85fe-fe6924c43319\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 01:13:43 crc kubenswrapper[4665]: I1205 01:13:43.070804 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 01:13:43 crc kubenswrapper[4665]: I1205 01:13:43.249768 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 01:13:43 crc kubenswrapper[4665]: W1205 01:13:43.256775 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod30d7d777_4450_4236_85fe_fe6924c43319.slice/crio-c52782dda738f42e34ca4b651f9266438dfc341987e172ac6d565d56c40f9915 WatchSource:0}: Error finding container c52782dda738f42e34ca4b651f9266438dfc341987e172ac6d565d56c40f9915: Status 404 returned error can't find the container with id c52782dda738f42e34ca4b651f9266438dfc341987e172ac6d565d56c40f9915 Dec 05 01:13:43 crc kubenswrapper[4665]: I1205 01:13:43.523532 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"30d7d777-4450-4236-85fe-fe6924c43319","Type":"ContainerStarted","Data":"c52782dda738f42e34ca4b651f9266438dfc341987e172ac6d565d56c40f9915"} Dec 05 01:13:43 crc kubenswrapper[4665]: I1205 01:13:43.525186 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"ffc75658-8f25-4845-9cd6-1fdeaf8f0a00","Type":"ContainerStarted","Data":"16e39d2febb0875e17552375a8d301e85b6ff85bca8126c8973dc0cdc9019200"} Dec 05 01:13:43 crc kubenswrapper[4665]: I1205 01:13:43.525217 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"ffc75658-8f25-4845-9cd6-1fdeaf8f0a00","Type":"ContainerStarted","Data":"dfa12b3b4fb1fb9be18fd84baf19c591eba750d9ed220a3928fc66cba69f9e8e"} Dec 05 01:13:43 crc kubenswrapper[4665]: I1205 01:13:43.537788 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=6.537777238 podStartE2EDuration="6.537777238s" podCreationTimestamp="2025-12-05 01:13:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:13:43.537666764 +0000 UTC m=+198.877059063" watchObservedRunningTime="2025-12-05 01:13:43.537777238 +0000 UTC m=+198.877169537" Dec 05 01:13:44 crc kubenswrapper[4665]: I1205 01:13:44.530581 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"30d7d777-4450-4236-85fe-fe6924c43319","Type":"ContainerStarted","Data":"4857624859ac0db75b628138162d08f7ed352be4f7e45165400af9a49c1fbdc0"} Dec 05 01:13:44 crc kubenswrapper[4665]: I1205 01:13:44.531790 4665 generic.go:334] "Generic (PLEG): container finished" podID="ffc75658-8f25-4845-9cd6-1fdeaf8f0a00" containerID="16e39d2febb0875e17552375a8d301e85b6ff85bca8126c8973dc0cdc9019200" exitCode=0 Dec 05 01:13:44 
crc kubenswrapper[4665]: I1205 01:13:44.531833 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"ffc75658-8f25-4845-9cd6-1fdeaf8f0a00","Type":"ContainerDied","Data":"16e39d2febb0875e17552375a8d301e85b6ff85bca8126c8973dc0cdc9019200"} Dec 05 01:13:44 crc kubenswrapper[4665]: I1205 01:13:44.545683 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=2.545659011 podStartE2EDuration="2.545659011s" podCreationTimestamp="2025-12-05 01:13:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:13:44.545158908 +0000 UTC m=+199.884551207" watchObservedRunningTime="2025-12-05 01:13:44.545659011 +0000 UTC m=+199.885051310" Dec 05 01:13:44 crc kubenswrapper[4665]: I1205 01:13:44.922245 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:13:44 crc kubenswrapper[4665]: I1205 01:13:44.922597 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:13:44 crc kubenswrapper[4665]: I1205 01:13:44.922642 4665 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 01:13:44 crc kubenswrapper[4665]: I1205 01:13:44.923439 4665 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2"} pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 01:13:44 crc kubenswrapper[4665]: I1205 01:13:44.923586 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" containerID="cri-o://8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2" gracePeriod=600 Dec 05 01:13:45 crc kubenswrapper[4665]: I1205 01:13:45.538025 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerDied","Data":"8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2"} Dec 05 01:13:45 crc kubenswrapper[4665]: I1205 01:13:45.537947 4665 generic.go:334] "Generic (PLEG): container finished" podID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerID="8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2" exitCode=0 Dec 05 01:13:45 crc kubenswrapper[4665]: I1205 01:13:45.539601 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" 
event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerStarted","Data":"d5bc07be9bd709e1be646d373aeef4fffe2def85b634b1bda0f9a262e2181517"} Dec 05 01:13:45 crc kubenswrapper[4665]: I1205 01:13:45.782306 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 01:13:45 crc kubenswrapper[4665]: I1205 01:13:45.861909 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ffc75658-8f25-4845-9cd6-1fdeaf8f0a00-kubelet-dir\") pod \"ffc75658-8f25-4845-9cd6-1fdeaf8f0a00\" (UID: \"ffc75658-8f25-4845-9cd6-1fdeaf8f0a00\") " Dec 05 01:13:45 crc kubenswrapper[4665]: I1205 01:13:45.862013 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ffc75658-8f25-4845-9cd6-1fdeaf8f0a00-kube-api-access\") pod \"ffc75658-8f25-4845-9cd6-1fdeaf8f0a00\" (UID: \"ffc75658-8f25-4845-9cd6-1fdeaf8f0a00\") " Dec 05 01:13:45 crc kubenswrapper[4665]: I1205 01:13:45.862042 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ffc75658-8f25-4845-9cd6-1fdeaf8f0a00-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "ffc75658-8f25-4845-9cd6-1fdeaf8f0a00" (UID: "ffc75658-8f25-4845-9cd6-1fdeaf8f0a00"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:13:45 crc kubenswrapper[4665]: I1205 01:13:45.862404 4665 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/ffc75658-8f25-4845-9cd6-1fdeaf8f0a00-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 01:13:45 crc kubenswrapper[4665]: I1205 01:13:45.867844 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffc75658-8f25-4845-9cd6-1fdeaf8f0a00-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "ffc75658-8f25-4845-9cd6-1fdeaf8f0a00" (UID: "ffc75658-8f25-4845-9cd6-1fdeaf8f0a00"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:13:45 crc kubenswrapper[4665]: I1205 01:13:45.964048 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/ffc75658-8f25-4845-9cd6-1fdeaf8f0a00-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 01:13:46 crc kubenswrapper[4665]: I1205 01:13:46.546579 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"ffc75658-8f25-4845-9cd6-1fdeaf8f0a00","Type":"ContainerDied","Data":"dfa12b3b4fb1fb9be18fd84baf19c591eba750d9ed220a3928fc66cba69f9e8e"} Dec 05 01:13:46 crc kubenswrapper[4665]: I1205 01:13:46.546955 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dfa12b3b4fb1fb9be18fd84baf19c591eba750d9ed220a3928fc66cba69f9e8e" Dec 05 01:13:46 crc kubenswrapper[4665]: I1205 01:13:46.546602 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 01:13:53 crc kubenswrapper[4665]: I1205 01:13:53.578687 4665 generic.go:334] "Generic (PLEG): container finished" podID="e87a1b03-5914-4f5a-92f8-ddf1b18864d4" containerID="7ed07eae895ae955f955dc4891e277fa75d168beb003feaabf6fa2f9d3d60416" exitCode=0 Dec 05 01:13:53 crc kubenswrapper[4665]: I1205 01:13:53.580208 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mdh6k" event={"ID":"e87a1b03-5914-4f5a-92f8-ddf1b18864d4","Type":"ContainerDied","Data":"7ed07eae895ae955f955dc4891e277fa75d168beb003feaabf6fa2f9d3d60416"} Dec 05 01:13:54 crc kubenswrapper[4665]: I1205 01:13:54.585221 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w5xm7" event={"ID":"d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58","Type":"ContainerStarted","Data":"b77cb71861fcb53868fd64f13fabf8a5b4a1c073b9336aea8e47667898e452df"} Dec 05 01:13:54 crc kubenswrapper[4665]: I1205 01:13:54.600985 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mdh6k" event={"ID":"e87a1b03-5914-4f5a-92f8-ddf1b18864d4","Type":"ContainerStarted","Data":"1ac989d85ac45fe9376454cc1911bb3200f3a86d69f84ee99a29e1765db35e27"} Dec 05 01:13:54 crc kubenswrapper[4665]: I1205 01:13:54.627174 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-mdh6k" podStartSLOduration=3.822266429 podStartE2EDuration="53.627155143s" podCreationTimestamp="2025-12-05 01:13:01 +0000 UTC" firstStartedPulling="2025-12-05 01:13:04.143243151 +0000 UTC m=+159.482635450" lastFinishedPulling="2025-12-05 01:13:53.948131865 +0000 UTC m=+209.287524164" observedRunningTime="2025-12-05 01:13:54.626992549 +0000 UTC m=+209.966384858" watchObservedRunningTime="2025-12-05 01:13:54.627155143 +0000 UTC m=+209.966547442" Dec 05 01:13:55 crc kubenswrapper[4665]: I1205 01:13:55.606119 4665 generic.go:334] "Generic (PLEG): container finished" podID="d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58" containerID="b77cb71861fcb53868fd64f13fabf8a5b4a1c073b9336aea8e47667898e452df" exitCode=0 Dec 05 01:13:55 crc kubenswrapper[4665]: I1205 01:13:55.606186 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w5xm7" event={"ID":"d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58","Type":"ContainerDied","Data":"b77cb71861fcb53868fd64f13fabf8a5b4a1c073b9336aea8e47667898e452df"} Dec 05 01:13:55 crc kubenswrapper[4665]: I1205 01:13:55.609449 4665 generic.go:334] "Generic (PLEG): container finished" podID="8877b1c9-1d17-46eb-aa75-b0cd2b2c5424" containerID="dbc9d5cb07aecfd29072234167cb25cc3f8e3aac1ee2ad0d5fc06bc040e230c8" exitCode=0 Dec 05 01:13:55 crc kubenswrapper[4665]: I1205 01:13:55.609478 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wjvw8" event={"ID":"8877b1c9-1d17-46eb-aa75-b0cd2b2c5424","Type":"ContainerDied","Data":"dbc9d5cb07aecfd29072234167cb25cc3f8e3aac1ee2ad0d5fc06bc040e230c8"} Dec 05 01:13:56 crc kubenswrapper[4665]: I1205 01:13:56.614313 4665 generic.go:334] "Generic (PLEG): container finished" podID="cdf1b771-7028-4cbd-ae5f-23cdf3784ecd" containerID="e74f541e759e3582e8398b9c46fa32df257c731961dd2c4e5f5314c8c3e81d22" exitCode=0 Dec 05 01:13:56 crc kubenswrapper[4665]: I1205 01:13:56.614580 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2b4bm" 
event={"ID":"cdf1b771-7028-4cbd-ae5f-23cdf3784ecd","Type":"ContainerDied","Data":"e74f541e759e3582e8398b9c46fa32df257c731961dd2c4e5f5314c8c3e81d22"} Dec 05 01:13:56 crc kubenswrapper[4665]: I1205 01:13:56.618614 4665 generic.go:334] "Generic (PLEG): container finished" podID="b37e5172-40b2-48e4-978e-cec68ac061e4" containerID="1cfbee8a71b6e41942fae29639fa8e461b7568defd2c3d0827690980fb7338fe" exitCode=0 Dec 05 01:13:56 crc kubenswrapper[4665]: I1205 01:13:56.618654 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tdbf" event={"ID":"b37e5172-40b2-48e4-978e-cec68ac061e4","Type":"ContainerDied","Data":"1cfbee8a71b6e41942fae29639fa8e461b7568defd2c3d0827690980fb7338fe"} Dec 05 01:13:59 crc kubenswrapper[4665]: I1205 01:13:59.634799 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w5xm7" event={"ID":"d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58","Type":"ContainerStarted","Data":"c03286b7dc1e54297e997abfd052ef8e35b2070b913be28459e4b8e640cc315e"} Dec 05 01:13:59 crc kubenswrapper[4665]: I1205 01:13:59.661113 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-w5xm7" podStartSLOduration=4.288898884 podStartE2EDuration="1m1.661098644s" podCreationTimestamp="2025-12-05 01:12:58 +0000 UTC" firstStartedPulling="2025-12-05 01:13:00.846563156 +0000 UTC m=+156.185955455" lastFinishedPulling="2025-12-05 01:13:58.218762916 +0000 UTC m=+213.558155215" observedRunningTime="2025-12-05 01:13:59.660658672 +0000 UTC m=+215.000050981" watchObservedRunningTime="2025-12-05 01:13:59.661098644 +0000 UTC m=+215.000490933" Dec 05 01:14:01 crc kubenswrapper[4665]: I1205 01:14:01.447955 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-mdh6k" Dec 05 01:14:01 crc kubenswrapper[4665]: I1205 01:14:01.448264 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-mdh6k" Dec 05 01:14:01 crc kubenswrapper[4665]: I1205 01:14:01.645548 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wjvw8" event={"ID":"8877b1c9-1d17-46eb-aa75-b0cd2b2c5424","Type":"ContainerStarted","Data":"9be63410375e14769f1afa1d3e0970bb791657dc6f232d18494b4d7bca80b7df"} Dec 05 01:14:01 crc kubenswrapper[4665]: I1205 01:14:01.665387 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wjvw8" podStartSLOduration=4.305049739 podStartE2EDuration="1m1.665363209s" podCreationTimestamp="2025-12-05 01:13:00 +0000 UTC" firstStartedPulling="2025-12-05 01:13:03.142535994 +0000 UTC m=+158.481928293" lastFinishedPulling="2025-12-05 01:14:00.502849464 +0000 UTC m=+215.842241763" observedRunningTime="2025-12-05 01:14:01.661842964 +0000 UTC m=+217.001235263" watchObservedRunningTime="2025-12-05 01:14:01.665363209 +0000 UTC m=+217.004755518" Dec 05 01:14:01 crc kubenswrapper[4665]: I1205 01:14:01.674458 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-mdh6k" Dec 05 01:14:01 crc kubenswrapper[4665]: I1205 01:14:01.710900 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-mdh6k" Dec 05 01:14:03 crc kubenswrapper[4665]: I1205 01:14:03.656929 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-2b4bm" event={"ID":"cdf1b771-7028-4cbd-ae5f-23cdf3784ecd","Type":"ContainerStarted","Data":"367224c4e94d47d5aa5afbf1e02214e4a9cdb1869484ea0c593146d354e8144f"} Dec 05 01:14:03 crc kubenswrapper[4665]: I1205 01:14:03.686562 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2b4bm" podStartSLOduration=3.477512932 podStartE2EDuration="1m6.686541784s" podCreationTimestamp="2025-12-05 01:12:57 +0000 UTC" firstStartedPulling="2025-12-05 01:12:59.749132572 +0000 UTC m=+155.088524871" lastFinishedPulling="2025-12-05 01:14:02.958161424 +0000 UTC m=+218.297553723" observedRunningTime="2025-12-05 01:14:03.682548795 +0000 UTC m=+219.021941114" watchObservedRunningTime="2025-12-05 01:14:03.686541784 +0000 UTC m=+219.025934093" Dec 05 01:14:04 crc kubenswrapper[4665]: I1205 01:14:04.663062 4665 generic.go:334] "Generic (PLEG): container finished" podID="a5720325-40b1-49f2-a8a2-39dc7aef289a" containerID="f399cd8f30d94b108d29790c5fc100a5129f7440dba4b7e8cef529009d29936d" exitCode=0 Dec 05 01:14:04 crc kubenswrapper[4665]: I1205 01:14:04.663131 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6zz57" event={"ID":"a5720325-40b1-49f2-a8a2-39dc7aef289a","Type":"ContainerDied","Data":"f399cd8f30d94b108d29790c5fc100a5129f7440dba4b7e8cef529009d29936d"} Dec 05 01:14:04 crc kubenswrapper[4665]: I1205 01:14:04.665592 4665 generic.go:334] "Generic (PLEG): container finished" podID="2494dbd6-0990-4ec2-9338-f0ef366f13f5" containerID="b3c46f8b48c4a0bbfb8ce768aba048b4191215a5aa800a979520c88d154d2f30" exitCode=0 Dec 05 01:14:04 crc kubenswrapper[4665]: I1205 01:14:04.665694 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v24sw" event={"ID":"2494dbd6-0990-4ec2-9338-f0ef366f13f5","Type":"ContainerDied","Data":"b3c46f8b48c4a0bbfb8ce768aba048b4191215a5aa800a979520c88d154d2f30"} Dec 05 01:14:04 crc kubenswrapper[4665]: I1205 01:14:04.668491 4665 generic.go:334] "Generic (PLEG): container finished" podID="06fcff7e-da06-4d77-abbf-361c5c23f666" containerID="92dbf87c208d5758a822fe4db6bda572668b806c7d26ae377bd36227832ac28d" exitCode=0 Dec 05 01:14:04 crc kubenswrapper[4665]: I1205 01:14:04.668687 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcrmz" event={"ID":"06fcff7e-da06-4d77-abbf-361c5c23f666","Type":"ContainerDied","Data":"92dbf87c208d5758a822fe4db6bda572668b806c7d26ae377bd36227832ac28d"} Dec 05 01:14:04 crc kubenswrapper[4665]: I1205 01:14:04.672944 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tdbf" event={"ID":"b37e5172-40b2-48e4-978e-cec68ac061e4","Type":"ContainerStarted","Data":"d76eb183f0062530880dc1bbd7aeb863e10a6ae3f9f96afea5dbc1ae5d06694a"} Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.002688 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-4tdbf" podStartSLOduration=5.024437286 podStartE2EDuration="1m8.002673143s" podCreationTimestamp="2025-12-05 01:12:57 +0000 UTC" firstStartedPulling="2025-12-05 01:13:00.823492126 +0000 UTC m=+156.162884425" lastFinishedPulling="2025-12-05 01:14:03.801727973 +0000 UTC m=+219.141120282" observedRunningTime="2025-12-05 01:14:04.73457888 +0000 UTC m=+220.073971209" watchObservedRunningTime="2025-12-05 01:14:05.002673143 +0000 UTC m=+220.342065442" Dec 05 01:14:05 crc 
kubenswrapper[4665]: I1205 01:14:05.005800 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mdh6k"] Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.006032 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-mdh6k" podUID="e87a1b03-5914-4f5a-92f8-ddf1b18864d4" containerName="registry-server" containerID="cri-o://1ac989d85ac45fe9376454cc1911bb3200f3a86d69f84ee99a29e1765db35e27" gracePeriod=2 Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.401918 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-mdh6k" Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.515488 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tnlfp\" (UniqueName: \"kubernetes.io/projected/e87a1b03-5914-4f5a-92f8-ddf1b18864d4-kube-api-access-tnlfp\") pod \"e87a1b03-5914-4f5a-92f8-ddf1b18864d4\" (UID: \"e87a1b03-5914-4f5a-92f8-ddf1b18864d4\") " Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.515804 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e87a1b03-5914-4f5a-92f8-ddf1b18864d4-utilities\") pod \"e87a1b03-5914-4f5a-92f8-ddf1b18864d4\" (UID: \"e87a1b03-5914-4f5a-92f8-ddf1b18864d4\") " Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.515852 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e87a1b03-5914-4f5a-92f8-ddf1b18864d4-catalog-content\") pod \"e87a1b03-5914-4f5a-92f8-ddf1b18864d4\" (UID: \"e87a1b03-5914-4f5a-92f8-ddf1b18864d4\") " Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.517067 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e87a1b03-5914-4f5a-92f8-ddf1b18864d4-utilities" (OuterVolumeSpecName: "utilities") pod "e87a1b03-5914-4f5a-92f8-ddf1b18864d4" (UID: "e87a1b03-5914-4f5a-92f8-ddf1b18864d4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.524435 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e87a1b03-5914-4f5a-92f8-ddf1b18864d4-kube-api-access-tnlfp" (OuterVolumeSpecName: "kube-api-access-tnlfp") pod "e87a1b03-5914-4f5a-92f8-ddf1b18864d4" (UID: "e87a1b03-5914-4f5a-92f8-ddf1b18864d4"). InnerVolumeSpecName "kube-api-access-tnlfp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.616957 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e87a1b03-5914-4f5a-92f8-ddf1b18864d4-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.616990 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tnlfp\" (UniqueName: \"kubernetes.io/projected/e87a1b03-5914-4f5a-92f8-ddf1b18864d4-kube-api-access-tnlfp\") on node \"crc\" DevicePath \"\"" Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.636423 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e87a1b03-5914-4f5a-92f8-ddf1b18864d4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e87a1b03-5914-4f5a-92f8-ddf1b18864d4" (UID: "e87a1b03-5914-4f5a-92f8-ddf1b18864d4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.678963 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcrmz" event={"ID":"06fcff7e-da06-4d77-abbf-361c5c23f666","Type":"ContainerStarted","Data":"9252797d687f184ea7261d6875b6da77873914400152bb515cb3b02b7bbc3cca"} Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.680718 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6zz57" event={"ID":"a5720325-40b1-49f2-a8a2-39dc7aef289a","Type":"ContainerStarted","Data":"b7cdf2673a8fb651c2a2a0d8d05fc5c3e4d160a4f371560efa2aad057a25a3df"} Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.682566 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v24sw" event={"ID":"2494dbd6-0990-4ec2-9338-f0ef366f13f5","Type":"ContainerStarted","Data":"377830bc3883021c17159e5aa9a0221a0c7d68f8b18acfd84a71b663530a49e5"} Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.685002 4665 generic.go:334] "Generic (PLEG): container finished" podID="e87a1b03-5914-4f5a-92f8-ddf1b18864d4" containerID="1ac989d85ac45fe9376454cc1911bb3200f3a86d69f84ee99a29e1765db35e27" exitCode=0 Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.685053 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mdh6k" event={"ID":"e87a1b03-5914-4f5a-92f8-ddf1b18864d4","Type":"ContainerDied","Data":"1ac989d85ac45fe9376454cc1911bb3200f3a86d69f84ee99a29e1765db35e27"} Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.685072 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-mdh6k" event={"ID":"e87a1b03-5914-4f5a-92f8-ddf1b18864d4","Type":"ContainerDied","Data":"85fb6dd3797af3e35d1d3fbef0c1443add9f6a2cfabdb0f9ba422dd2c2433cfb"} Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.685077 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-mdh6k" Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.685088 4665 scope.go:117] "RemoveContainer" containerID="1ac989d85ac45fe9376454cc1911bb3200f3a86d69f84ee99a29e1765db35e27" Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.698181 4665 scope.go:117] "RemoveContainer" containerID="7ed07eae895ae955f955dc4891e277fa75d168beb003feaabf6fa2f9d3d60416" Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.707100 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pcrmz" podStartSLOduration=3.4133670240000002 podStartE2EDuration="1m6.707075822s" podCreationTimestamp="2025-12-05 01:12:59 +0000 UTC" firstStartedPulling="2025-12-05 01:13:01.957082176 +0000 UTC m=+157.296474475" lastFinishedPulling="2025-12-05 01:14:05.250790974 +0000 UTC m=+220.590183273" observedRunningTime="2025-12-05 01:14:05.700349219 +0000 UTC m=+221.039741518" watchObservedRunningTime="2025-12-05 01:14:05.707075822 +0000 UTC m=+221.046468131" Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.717825 4665 scope.go:117] "RemoveContainer" containerID="907fed7440161b0b03b0fe55413f3a18f4ab0e14a569fc7135c1efb45211f360" Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.717832 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e87a1b03-5914-4f5a-92f8-ddf1b18864d4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.735589 4665 scope.go:117] "RemoveContainer" containerID="1ac989d85ac45fe9376454cc1911bb3200f3a86d69f84ee99a29e1765db35e27" Dec 05 01:14:05 crc kubenswrapper[4665]: E1205 01:14:05.736185 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1ac989d85ac45fe9376454cc1911bb3200f3a86d69f84ee99a29e1765db35e27\": container with ID starting with 1ac989d85ac45fe9376454cc1911bb3200f3a86d69f84ee99a29e1765db35e27 not found: ID does not exist" containerID="1ac989d85ac45fe9376454cc1911bb3200f3a86d69f84ee99a29e1765db35e27" Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.736220 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1ac989d85ac45fe9376454cc1911bb3200f3a86d69f84ee99a29e1765db35e27"} err="failed to get container status \"1ac989d85ac45fe9376454cc1911bb3200f3a86d69f84ee99a29e1765db35e27\": rpc error: code = NotFound desc = could not find container \"1ac989d85ac45fe9376454cc1911bb3200f3a86d69f84ee99a29e1765db35e27\": container with ID starting with 1ac989d85ac45fe9376454cc1911bb3200f3a86d69f84ee99a29e1765db35e27 not found: ID does not exist" Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.736239 4665 scope.go:117] "RemoveContainer" containerID="7ed07eae895ae955f955dc4891e277fa75d168beb003feaabf6fa2f9d3d60416" Dec 05 01:14:05 crc kubenswrapper[4665]: E1205 01:14:05.738051 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ed07eae895ae955f955dc4891e277fa75d168beb003feaabf6fa2f9d3d60416\": container with ID starting with 7ed07eae895ae955f955dc4891e277fa75d168beb003feaabf6fa2f9d3d60416 not found: ID does not exist" containerID="7ed07eae895ae955f955dc4891e277fa75d168beb003feaabf6fa2f9d3d60416" Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.738078 4665 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"7ed07eae895ae955f955dc4891e277fa75d168beb003feaabf6fa2f9d3d60416"} err="failed to get container status \"7ed07eae895ae955f955dc4891e277fa75d168beb003feaabf6fa2f9d3d60416\": rpc error: code = NotFound desc = could not find container \"7ed07eae895ae955f955dc4891e277fa75d168beb003feaabf6fa2f9d3d60416\": container with ID starting with 7ed07eae895ae955f955dc4891e277fa75d168beb003feaabf6fa2f9d3d60416 not found: ID does not exist" Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.738092 4665 scope.go:117] "RemoveContainer" containerID="907fed7440161b0b03b0fe55413f3a18f4ab0e14a569fc7135c1efb45211f360" Dec 05 01:14:05 crc kubenswrapper[4665]: E1205 01:14:05.739750 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"907fed7440161b0b03b0fe55413f3a18f4ab0e14a569fc7135c1efb45211f360\": container with ID starting with 907fed7440161b0b03b0fe55413f3a18f4ab0e14a569fc7135c1efb45211f360 not found: ID does not exist" containerID="907fed7440161b0b03b0fe55413f3a18f4ab0e14a569fc7135c1efb45211f360" Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.739776 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"907fed7440161b0b03b0fe55413f3a18f4ab0e14a569fc7135c1efb45211f360"} err="failed to get container status \"907fed7440161b0b03b0fe55413f3a18f4ab0e14a569fc7135c1efb45211f360\": rpc error: code = NotFound desc = could not find container \"907fed7440161b0b03b0fe55413f3a18f4ab0e14a569fc7135c1efb45211f360\": container with ID starting with 907fed7440161b0b03b0fe55413f3a18f4ab0e14a569fc7135c1efb45211f360 not found: ID does not exist" Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.755202 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6zz57" podStartSLOduration=4.783097986 podStartE2EDuration="1m5.755185558s" podCreationTimestamp="2025-12-05 01:13:00 +0000 UTC" firstStartedPulling="2025-12-05 01:13:04.180680439 +0000 UTC m=+159.520072738" lastFinishedPulling="2025-12-05 01:14:05.152768001 +0000 UTC m=+220.492160310" observedRunningTime="2025-12-05 01:14:05.7519292 +0000 UTC m=+221.091321519" watchObservedRunningTime="2025-12-05 01:14:05.755185558 +0000 UTC m=+221.094577857" Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.756707 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-v24sw" podStartSLOduration=3.2821222629999998 podStartE2EDuration="1m7.75670031s" podCreationTimestamp="2025-12-05 01:12:58 +0000 UTC" firstStartedPulling="2025-12-05 01:13:00.862527513 +0000 UTC m=+156.201919812" lastFinishedPulling="2025-12-05 01:14:05.33710556 +0000 UTC m=+220.676497859" observedRunningTime="2025-12-05 01:14:05.728648228 +0000 UTC m=+221.068040537" watchObservedRunningTime="2025-12-05 01:14:05.75670031 +0000 UTC m=+221.096092609" Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.765340 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-mdh6k"] Dec 05 01:14:05 crc kubenswrapper[4665]: I1205 01:14:05.770499 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-mdh6k"] Dec 05 01:14:06 crc kubenswrapper[4665]: I1205 01:14:06.898286 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e87a1b03-5914-4f5a-92f8-ddf1b18864d4" path="/var/lib/kubelet/pods/e87a1b03-5914-4f5a-92f8-ddf1b18864d4/volumes" 
Dec 05 01:14:08 crc kubenswrapper[4665]: I1205 01:14:08.025225 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2b4bm"
Dec 05 01:14:08 crc kubenswrapper[4665]: I1205 01:14:08.025608 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2b4bm"
Dec 05 01:14:08 crc kubenswrapper[4665]: I1205 01:14:08.071037 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2b4bm"
Dec 05 01:14:08 crc kubenswrapper[4665]: I1205 01:14:08.269244 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-4tdbf"
Dec 05 01:14:08 crc kubenswrapper[4665]: I1205 01:14:08.269591 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-4tdbf"
Dec 05 01:14:08 crc kubenswrapper[4665]: I1205 01:14:08.307027 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-4tdbf"
Dec 05 01:14:08 crc kubenswrapper[4665]: I1205 01:14:08.485027 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-v24sw"
Dec 05 01:14:08 crc kubenswrapper[4665]: I1205 01:14:08.485090 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-v24sw"
Dec 05 01:14:08 crc kubenswrapper[4665]: I1205 01:14:08.521959 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-v24sw"
Dec 05 01:14:08 crc kubenswrapper[4665]: I1205 01:14:08.712445 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-w5xm7"
Dec 05 01:14:08 crc kubenswrapper[4665]: I1205 01:14:08.712489 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-w5xm7"
Dec 05 01:14:08 crc kubenswrapper[4665]: I1205 01:14:08.748696 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2b4bm"
Dec 05 01:14:08 crc kubenswrapper[4665]: I1205 01:14:08.750401 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-4tdbf"
Dec 05 01:14:08 crc kubenswrapper[4665]: I1205 01:14:08.755925 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-w5xm7"
Dec 05 01:14:09 crc kubenswrapper[4665]: I1205 01:14:09.750536 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-w5xm7"
Dec 05 01:14:10 crc kubenswrapper[4665]: I1205 01:14:10.245956 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pcrmz"
Dec 05 01:14:10 crc kubenswrapper[4665]: I1205 01:14:10.246159 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pcrmz"
Dec 05 01:14:10 crc kubenswrapper[4665]: I1205 01:14:10.296883 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pcrmz"
Dec 05 01:14:10 crc kubenswrapper[4665]: I1205 01:14:10.641624 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wjvw8"
Dec 05 01:14:10 crc kubenswrapper[4665]: I1205 01:14:10.642046 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wjvw8"
Dec 05 01:14:10 crc kubenswrapper[4665]: I1205 01:14:10.775482 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pcrmz"
Dec 05 01:14:11 crc kubenswrapper[4665]: I1205 01:14:11.014219 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wjvw8"
Dec 05 01:14:11 crc kubenswrapper[4665]: I1205 01:14:11.065571 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-6bhrf"]
Dec 05 01:14:11 crc kubenswrapper[4665]: I1205 01:14:11.289206 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6zz57"
Dec 05 01:14:11 crc kubenswrapper[4665]: I1205 01:14:11.289421 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6zz57"
Dec 05 01:14:11 crc kubenswrapper[4665]: I1205 01:14:11.328533 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6zz57"
Dec 05 01:14:11 crc kubenswrapper[4665]: I1205 01:14:11.753173 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wjvw8"
Dec 05 01:14:11 crc kubenswrapper[4665]: I1205 01:14:11.764615 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6zz57"
Dec 05 01:14:12 crc kubenswrapper[4665]: I1205 01:14:12.809273 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-w5xm7"]
Dec 05 01:14:12 crc kubenswrapper[4665]: I1205 01:14:12.809755 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-w5xm7" podUID="d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58" containerName="registry-server" containerID="cri-o://c03286b7dc1e54297e997abfd052ef8e35b2070b913be28459e4b8e640cc315e" gracePeriod=2
Dec 05 01:14:13 crc kubenswrapper[4665]: I1205 01:14:13.404551 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wjvw8"]
Dec 05 01:14:13 crc kubenswrapper[4665]: I1205 01:14:13.661834 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-w5xm7"
Dec 05 01:14:13 crc kubenswrapper[4665]: I1205 01:14:13.729955 4665 generic.go:334] "Generic (PLEG): container finished" podID="d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58" containerID="c03286b7dc1e54297e997abfd052ef8e35b2070b913be28459e4b8e640cc315e" exitCode=0
Dec 05 01:14:13 crc kubenswrapper[4665]: I1205 01:14:13.730026 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-w5xm7"
Dec 05 01:14:13 crc kubenswrapper[4665]: I1205 01:14:13.730059 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w5xm7" event={"ID":"d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58","Type":"ContainerDied","Data":"c03286b7dc1e54297e997abfd052ef8e35b2070b913be28459e4b8e640cc315e"}
Dec 05 01:14:13 crc kubenswrapper[4665]: I1205 01:14:13.730100 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-w5xm7" event={"ID":"d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58","Type":"ContainerDied","Data":"b4526ed8e0fee10d731a656bb92db0b14bfcf3b7e7c0d9a54f3df3250c518f76"}
Dec 05 01:14:13 crc kubenswrapper[4665]: I1205 01:14:13.730121 4665 scope.go:117] "RemoveContainer" containerID="c03286b7dc1e54297e997abfd052ef8e35b2070b913be28459e4b8e640cc315e"
Dec 05 01:14:13 crc kubenswrapper[4665]: I1205 01:14:13.730735 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wjvw8" podUID="8877b1c9-1d17-46eb-aa75-b0cd2b2c5424" containerName="registry-server" containerID="cri-o://9be63410375e14769f1afa1d3e0970bb791657dc6f232d18494b4d7bca80b7df" gracePeriod=2
Dec 05 01:14:13 crc kubenswrapper[4665]: I1205 01:14:13.747185 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58-utilities\") pod \"d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58\" (UID: \"d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58\") "
Dec 05 01:14:13 crc kubenswrapper[4665]: I1205 01:14:13.747285 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58-catalog-content\") pod \"d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58\" (UID: \"d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58\") "
Dec 05 01:14:13 crc kubenswrapper[4665]: I1205 01:14:13.747346 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ndfn\" (UniqueName: \"kubernetes.io/projected/d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58-kube-api-access-6ndfn\") pod \"d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58\" (UID: \"d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58\") "
Dec 05 01:14:13 crc kubenswrapper[4665]: I1205 01:14:13.748058 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58-utilities" (OuterVolumeSpecName: "utilities") pod "d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58" (UID: "d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 01:14:13 crc kubenswrapper[4665]: I1205 01:14:13.748972 4665 scope.go:117] "RemoveContainer" containerID="b77cb71861fcb53868fd64f13fabf8a5b4a1c073b9336aea8e47667898e452df"
Dec 05 01:14:13 crc kubenswrapper[4665]: I1205 01:14:13.763458 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58-kube-api-access-6ndfn" (OuterVolumeSpecName: "kube-api-access-6ndfn") pod "d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58" (UID: "d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58"). InnerVolumeSpecName "kube-api-access-6ndfn". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:14:13 crc kubenswrapper[4665]: I1205 01:14:13.805149 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58" (UID: "d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 01:14:13 crc kubenswrapper[4665]: I1205 01:14:13.832351 4665 scope.go:117] "RemoveContainer" containerID="093c4ab4e6f7f7db9770994d07da2fae1ee5c4d1d4d76193b0b4ca6fce373b52"
Dec 05 01:14:13 crc kubenswrapper[4665]: I1205 01:14:13.848808 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 01:14:13 crc kubenswrapper[4665]: I1205 01:14:13.849203 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ndfn\" (UniqueName: \"kubernetes.io/projected/d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58-kube-api-access-6ndfn\") on node \"crc\" DevicePath \"\""
Dec 05 01:14:13 crc kubenswrapper[4665]: I1205 01:14:13.849217 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 01:14:13 crc kubenswrapper[4665]: I1205 01:14:13.863123 4665 scope.go:117] "RemoveContainer" containerID="c03286b7dc1e54297e997abfd052ef8e35b2070b913be28459e4b8e640cc315e"
Dec 05 01:14:13 crc kubenswrapper[4665]: E1205 01:14:13.863636 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c03286b7dc1e54297e997abfd052ef8e35b2070b913be28459e4b8e640cc315e\": container with ID starting with c03286b7dc1e54297e997abfd052ef8e35b2070b913be28459e4b8e640cc315e not found: ID does not exist" containerID="c03286b7dc1e54297e997abfd052ef8e35b2070b913be28459e4b8e640cc315e"
Dec 05 01:14:13 crc kubenswrapper[4665]: I1205 01:14:13.863673 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c03286b7dc1e54297e997abfd052ef8e35b2070b913be28459e4b8e640cc315e"} err="failed to get container status \"c03286b7dc1e54297e997abfd052ef8e35b2070b913be28459e4b8e640cc315e\": rpc error: code = NotFound desc = could not find container \"c03286b7dc1e54297e997abfd052ef8e35b2070b913be28459e4b8e640cc315e\": container with ID starting with c03286b7dc1e54297e997abfd052ef8e35b2070b913be28459e4b8e640cc315e not found: ID does not exist"
Dec 05 01:14:13 crc kubenswrapper[4665]: I1205 01:14:13.863717 4665 scope.go:117] "RemoveContainer" containerID="b77cb71861fcb53868fd64f13fabf8a5b4a1c073b9336aea8e47667898e452df"
Dec 05 01:14:13 crc kubenswrapper[4665]: E1205 01:14:13.864129 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b77cb71861fcb53868fd64f13fabf8a5b4a1c073b9336aea8e47667898e452df\": container with ID starting with b77cb71861fcb53868fd64f13fabf8a5b4a1c073b9336aea8e47667898e452df not found: ID does not exist" containerID="b77cb71861fcb53868fd64f13fabf8a5b4a1c073b9336aea8e47667898e452df"
Dec 05 01:14:13 crc kubenswrapper[4665]: I1205 01:14:13.864206 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b77cb71861fcb53868fd64f13fabf8a5b4a1c073b9336aea8e47667898e452df"} err="failed to get container status \"b77cb71861fcb53868fd64f13fabf8a5b4a1c073b9336aea8e47667898e452df\": rpc error: code = NotFound desc = could not find container \"b77cb71861fcb53868fd64f13fabf8a5b4a1c073b9336aea8e47667898e452df\": container with ID starting with b77cb71861fcb53868fd64f13fabf8a5b4a1c073b9336aea8e47667898e452df not found: ID does not exist"
Dec 05 01:14:13 crc kubenswrapper[4665]: I1205 01:14:13.864241 4665 scope.go:117] "RemoveContainer" containerID="093c4ab4e6f7f7db9770994d07da2fae1ee5c4d1d4d76193b0b4ca6fce373b52"
Dec 05 01:14:13 crc kubenswrapper[4665]: E1205 01:14:13.864580 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"093c4ab4e6f7f7db9770994d07da2fae1ee5c4d1d4d76193b0b4ca6fce373b52\": container with ID starting with 093c4ab4e6f7f7db9770994d07da2fae1ee5c4d1d4d76193b0b4ca6fce373b52 not found: ID does not exist" containerID="093c4ab4e6f7f7db9770994d07da2fae1ee5c4d1d4d76193b0b4ca6fce373b52"
Dec 05 01:14:13 crc kubenswrapper[4665]: I1205 01:14:13.864614 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"093c4ab4e6f7f7db9770994d07da2fae1ee5c4d1d4d76193b0b4ca6fce373b52"} err="failed to get container status \"093c4ab4e6f7f7db9770994d07da2fae1ee5c4d1d4d76193b0b4ca6fce373b52\": rpc error: code = NotFound desc = could not find container \"093c4ab4e6f7f7db9770994d07da2fae1ee5c4d1d4d76193b0b4ca6fce373b52\": container with ID starting with 093c4ab4e6f7f7db9770994d07da2fae1ee5c4d1d4d76193b0b4ca6fce373b52 not found: ID does not exist"
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.075440 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wjvw8"
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.087890 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-w5xm7"]
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.092066 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-w5xm7"]
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.153447 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8877b1c9-1d17-46eb-aa75-b0cd2b2c5424-utilities\") pod \"8877b1c9-1d17-46eb-aa75-b0cd2b2c5424\" (UID: \"8877b1c9-1d17-46eb-aa75-b0cd2b2c5424\") "
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.153826 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8877b1c9-1d17-46eb-aa75-b0cd2b2c5424-catalog-content\") pod \"8877b1c9-1d17-46eb-aa75-b0cd2b2c5424\" (UID: \"8877b1c9-1d17-46eb-aa75-b0cd2b2c5424\") "
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.153964 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gmldx\" (UniqueName: \"kubernetes.io/projected/8877b1c9-1d17-46eb-aa75-b0cd2b2c5424-kube-api-access-gmldx\") pod \"8877b1c9-1d17-46eb-aa75-b0cd2b2c5424\" (UID: \"8877b1c9-1d17-46eb-aa75-b0cd2b2c5424\") "
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.154476 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8877b1c9-1d17-46eb-aa75-b0cd2b2c5424-utilities" (OuterVolumeSpecName: "utilities") pod "8877b1c9-1d17-46eb-aa75-b0cd2b2c5424" (UID: "8877b1c9-1d17-46eb-aa75-b0cd2b2c5424"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.155864 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8877b1c9-1d17-46eb-aa75-b0cd2b2c5424-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.166978 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8877b1c9-1d17-46eb-aa75-b0cd2b2c5424-kube-api-access-gmldx" (OuterVolumeSpecName: "kube-api-access-gmldx") pod "8877b1c9-1d17-46eb-aa75-b0cd2b2c5424" (UID: "8877b1c9-1d17-46eb-aa75-b0cd2b2c5424"). InnerVolumeSpecName "kube-api-access-gmldx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.184935 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8877b1c9-1d17-46eb-aa75-b0cd2b2c5424-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8877b1c9-1d17-46eb-aa75-b0cd2b2c5424" (UID: "8877b1c9-1d17-46eb-aa75-b0cd2b2c5424"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.257207 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8877b1c9-1d17-46eb-aa75-b0cd2b2c5424-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.257267 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gmldx\" (UniqueName: \"kubernetes.io/projected/8877b1c9-1d17-46eb-aa75-b0cd2b2c5424-kube-api-access-gmldx\") on node \"crc\" DevicePath \"\""
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.740477 4665 generic.go:334] "Generic (PLEG): container finished" podID="8877b1c9-1d17-46eb-aa75-b0cd2b2c5424" containerID="9be63410375e14769f1afa1d3e0970bb791657dc6f232d18494b4d7bca80b7df" exitCode=0
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.740544 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wjvw8" event={"ID":"8877b1c9-1d17-46eb-aa75-b0cd2b2c5424","Type":"ContainerDied","Data":"9be63410375e14769f1afa1d3e0970bb791657dc6f232d18494b4d7bca80b7df"}
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.740598 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wjvw8"
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.740881 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wjvw8" event={"ID":"8877b1c9-1d17-46eb-aa75-b0cd2b2c5424","Type":"ContainerDied","Data":"b5973115dbdbe61e9ed3a588bbb88665c0887193998f6b86ff98114c196b09d9"}
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.741026 4665 scope.go:117] "RemoveContainer" containerID="9be63410375e14769f1afa1d3e0970bb791657dc6f232d18494b4d7bca80b7df"
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.757329 4665 scope.go:117] "RemoveContainer" containerID="dbc9d5cb07aecfd29072234167cb25cc3f8e3aac1ee2ad0d5fc06bc040e230c8"
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.774493 4665 scope.go:117] "RemoveContainer" containerID="0d1183c14223ecb2812e6e8dc3db5b220b6fab893b53e68526461e7055a969ad"
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.775506 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wjvw8"]
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.777694 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wjvw8"]
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.789577 4665 scope.go:117] "RemoveContainer" containerID="9be63410375e14769f1afa1d3e0970bb791657dc6f232d18494b4d7bca80b7df"
Dec 05 01:14:14 crc kubenswrapper[4665]: E1205 01:14:14.790066 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9be63410375e14769f1afa1d3e0970bb791657dc6f232d18494b4d7bca80b7df\": container with ID starting with 9be63410375e14769f1afa1d3e0970bb791657dc6f232d18494b4d7bca80b7df not found: ID does not exist" containerID="9be63410375e14769f1afa1d3e0970bb791657dc6f232d18494b4d7bca80b7df"
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.790109 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9be63410375e14769f1afa1d3e0970bb791657dc6f232d18494b4d7bca80b7df"} err="failed to get container status \"9be63410375e14769f1afa1d3e0970bb791657dc6f232d18494b4d7bca80b7df\": rpc error: code = NotFound desc = could not find container \"9be63410375e14769f1afa1d3e0970bb791657dc6f232d18494b4d7bca80b7df\": container with ID starting with 9be63410375e14769f1afa1d3e0970bb791657dc6f232d18494b4d7bca80b7df not found: ID does not exist"
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.790140 4665 scope.go:117] "RemoveContainer" containerID="dbc9d5cb07aecfd29072234167cb25cc3f8e3aac1ee2ad0d5fc06bc040e230c8"
Dec 05 01:14:14 crc kubenswrapper[4665]: E1205 01:14:14.790749 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dbc9d5cb07aecfd29072234167cb25cc3f8e3aac1ee2ad0d5fc06bc040e230c8\": container with ID starting with dbc9d5cb07aecfd29072234167cb25cc3f8e3aac1ee2ad0d5fc06bc040e230c8 not found: ID does not exist" containerID="dbc9d5cb07aecfd29072234167cb25cc3f8e3aac1ee2ad0d5fc06bc040e230c8"
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.790789 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbc9d5cb07aecfd29072234167cb25cc3f8e3aac1ee2ad0d5fc06bc040e230c8"} err="failed to get container status \"dbc9d5cb07aecfd29072234167cb25cc3f8e3aac1ee2ad0d5fc06bc040e230c8\": rpc error: code = NotFound desc = could not find container \"dbc9d5cb07aecfd29072234167cb25cc3f8e3aac1ee2ad0d5fc06bc040e230c8\": container with ID starting with dbc9d5cb07aecfd29072234167cb25cc3f8e3aac1ee2ad0d5fc06bc040e230c8 not found: ID does not exist"
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.790820 4665 scope.go:117] "RemoveContainer" containerID="0d1183c14223ecb2812e6e8dc3db5b220b6fab893b53e68526461e7055a969ad"
Dec 05 01:14:14 crc kubenswrapper[4665]: E1205 01:14:14.791169 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d1183c14223ecb2812e6e8dc3db5b220b6fab893b53e68526461e7055a969ad\": container with ID starting with 0d1183c14223ecb2812e6e8dc3db5b220b6fab893b53e68526461e7055a969ad not found: ID does not exist" containerID="0d1183c14223ecb2812e6e8dc3db5b220b6fab893b53e68526461e7055a969ad"
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.791193 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d1183c14223ecb2812e6e8dc3db5b220b6fab893b53e68526461e7055a969ad"} err="failed to get container status \"0d1183c14223ecb2812e6e8dc3db5b220b6fab893b53e68526461e7055a969ad\": rpc error: code = NotFound desc = could not find container \"0d1183c14223ecb2812e6e8dc3db5b220b6fab893b53e68526461e7055a969ad\": container with ID starting with 0d1183c14223ecb2812e6e8dc3db5b220b6fab893b53e68526461e7055a969ad not found: ID does not exist"
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.899207 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8877b1c9-1d17-46eb-aa75-b0cd2b2c5424" path="/var/lib/kubelet/pods/8877b1c9-1d17-46eb-aa75-b0cd2b2c5424/volumes"
Dec 05 01:14:14 crc kubenswrapper[4665]: I1205 01:14:14.901958 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58" path="/var/lib/kubelet/pods/d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58/volumes"
Dec 05 01:14:18 crc kubenswrapper[4665]: I1205 01:14:18.528284 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-v24sw"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.126408 4665 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 05 01:14:21 crc kubenswrapper[4665]: E1205 01:14:21.126974 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8877b1c9-1d17-46eb-aa75-b0cd2b2c5424" containerName="extract-utilities"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.126990 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="8877b1c9-1d17-46eb-aa75-b0cd2b2c5424" containerName="extract-utilities"
Dec 05 01:14:21 crc kubenswrapper[4665]: E1205 01:14:21.127006 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58" containerName="extract-content"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.127015 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58" containerName="extract-content"
Dec 05 01:14:21 crc kubenswrapper[4665]: E1205 01:14:21.127033 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58" containerName="registry-server"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.127041 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58" containerName="registry-server"
Dec 05 01:14:21 crc kubenswrapper[4665]: E1205 01:14:21.127052 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e87a1b03-5914-4f5a-92f8-ddf1b18864d4" containerName="extract-content"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.127059 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="e87a1b03-5914-4f5a-92f8-ddf1b18864d4" containerName="extract-content"
Dec 05 01:14:21 crc kubenswrapper[4665]: E1205 01:14:21.127070 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58" containerName="extract-utilities"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.127076 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58" containerName="extract-utilities"
Dec 05 01:14:21 crc kubenswrapper[4665]: E1205 01:14:21.127088 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e87a1b03-5914-4f5a-92f8-ddf1b18864d4" containerName="extract-utilities"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.127095 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="e87a1b03-5914-4f5a-92f8-ddf1b18864d4" containerName="extract-utilities"
Dec 05 01:14:21 crc kubenswrapper[4665]: E1205 01:14:21.127107 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8877b1c9-1d17-46eb-aa75-b0cd2b2c5424" containerName="registry-server"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.127115 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="8877b1c9-1d17-46eb-aa75-b0cd2b2c5424" containerName="registry-server"
Dec 05 01:14:21 crc kubenswrapper[4665]: E1205 01:14:21.127126 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8877b1c9-1d17-46eb-aa75-b0cd2b2c5424" containerName="extract-content"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.127134 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="8877b1c9-1d17-46eb-aa75-b0cd2b2c5424" containerName="extract-content"
Dec 05 01:14:21 crc kubenswrapper[4665]: E1205 01:14:21.127146 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e87a1b03-5914-4f5a-92f8-ddf1b18864d4" containerName="registry-server"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.127154 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="e87a1b03-5914-4f5a-92f8-ddf1b18864d4" containerName="registry-server"
Dec 05 01:14:21 crc kubenswrapper[4665]: E1205 01:14:21.127163 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffc75658-8f25-4845-9cd6-1fdeaf8f0a00" containerName="pruner"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.127170 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffc75658-8f25-4845-9cd6-1fdeaf8f0a00" containerName="pruner"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.127290 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="8877b1c9-1d17-46eb-aa75-b0cd2b2c5424" containerName="registry-server"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.127324 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="e87a1b03-5914-4f5a-92f8-ddf1b18864d4" containerName="registry-server"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.127339 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffc75658-8f25-4845-9cd6-1fdeaf8f0a00" containerName="pruner"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.127351 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8b8a9a4-009c-4811-bbac-9d7a3ed7fd58" containerName="registry-server"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.127874 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.128467 4665 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.128491 4665 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Dec 05 01:14:21 crc kubenswrapper[4665]: E1205 01:14:21.128679 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.128699 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 05 01:14:21 crc kubenswrapper[4665]: E1205 01:14:21.128712 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.128719 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 05 01:14:21 crc kubenswrapper[4665]: E1205 01:14:21.128728 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.128734 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 05 01:14:21 crc kubenswrapper[4665]: E1205 01:14:21.128743 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.128749 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 05 01:14:21 crc kubenswrapper[4665]: E1205 01:14:21.128756 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.128763 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 05 01:14:21 crc kubenswrapper[4665]: E1205 01:14:21.128773 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.128780 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.128879 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.128890 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.128900 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.128911 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.128918 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.128926 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz"
Dec 05 01:14:21 crc kubenswrapper[4665]: E1205 01:14:21.129029 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.129037 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.130690 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19" gracePeriod=15
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.130845 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420" gracePeriod=15
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.131129 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f" gracePeriod=15
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.131145 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba" gracePeriod=15
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.131169 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad" gracePeriod=15
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.180885 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.246672 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.246726 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.246752 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.246769 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.246811 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.246854 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.246885 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.247075 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.348479 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.348736 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.348765 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.348782 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.348799 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.348835 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.348854 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.348871 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.348930 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.348964 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.348984 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.349005 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.349025 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.349045 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.349065 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.349086 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.488447 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Dec 05 01:14:21 crc kubenswrapper[4665]: E1205 01:14:21.526905 4665 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.236:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187e2cb56196313a openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 01:14:21.513748794 +0000 UTC m=+236.853141093,LastTimestamp:2025-12-05 01:14:21.513748794 +0000 UTC m=+236.853141093,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.787214 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"993651b24686775ba568018170de138a447d76020f0c65e920229a5549cae820"}
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.787836 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"3c987195f48b40b429fdbcbbc922034c1bf5b368bc89c95f382a73f9737cb2e1"}
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.788707 4665 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.236:6443: connect: connection refused"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.789077 4665 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.236:6443: connect: connection refused"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.790024 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.791128 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log"
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.791936 4665 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420" exitCode=0
Dec 05 01:14:21 crc kubenswrapper[4665]: I1205
01:14:21.792074 4665 scope.go:117] "RemoveContainer" containerID="0a701d29a4922fb6b33ca3a2f614f232d84777c8e0a1bb56101bb7c7d1cdc241" Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.792091 4665 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f" exitCode=0 Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.792245 4665 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad" exitCode=0 Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.792264 4665 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba" exitCode=2 Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.793995 4665 generic.go:334] "Generic (PLEG): container finished" podID="30d7d777-4450-4236-85fe-fe6924c43319" containerID="4857624859ac0db75b628138162d08f7ed352be4f7e45165400af9a49c1fbdc0" exitCode=0 Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.794099 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"30d7d777-4450-4236-85fe-fe6924c43319","Type":"ContainerDied","Data":"4857624859ac0db75b628138162d08f7ed352be4f7e45165400af9a49c1fbdc0"} Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.794809 4665 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.795222 4665 status_manager.go:851] "Failed to get status for pod" podUID="30d7d777-4450-4236-85fe-fe6924c43319" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:21 crc kubenswrapper[4665]: I1205 01:14:21.795607 4665 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:22 crc kubenswrapper[4665]: I1205 01:14:22.800981 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.035752 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.036313 4665 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.036769 4665 status_manager.go:851] "Failed to get status for pod" podUID="30d7d777-4450-4236-85fe-fe6924c43319" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.170916 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/30d7d777-4450-4236-85fe-fe6924c43319-kubelet-dir\") pod \"30d7d777-4450-4236-85fe-fe6924c43319\" (UID: \"30d7d777-4450-4236-85fe-fe6924c43319\") " Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.171009 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/30d7d777-4450-4236-85fe-fe6924c43319-var-lock\") pod \"30d7d777-4450-4236-85fe-fe6924c43319\" (UID: \"30d7d777-4450-4236-85fe-fe6924c43319\") " Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.171018 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/30d7d777-4450-4236-85fe-fe6924c43319-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "30d7d777-4450-4236-85fe-fe6924c43319" (UID: "30d7d777-4450-4236-85fe-fe6924c43319"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.171094 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/30d7d777-4450-4236-85fe-fe6924c43319-kube-api-access\") pod \"30d7d777-4450-4236-85fe-fe6924c43319\" (UID: \"30d7d777-4450-4236-85fe-fe6924c43319\") " Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.171159 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/30d7d777-4450-4236-85fe-fe6924c43319-var-lock" (OuterVolumeSpecName: "var-lock") pod "30d7d777-4450-4236-85fe-fe6924c43319" (UID: "30d7d777-4450-4236-85fe-fe6924c43319"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.171422 4665 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/30d7d777-4450-4236-85fe-fe6924c43319-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.171439 4665 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/30d7d777-4450-4236-85fe-fe6924c43319-var-lock\") on node \"crc\" DevicePath \"\"" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.179558 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30d7d777-4450-4236-85fe-fe6924c43319-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "30d7d777-4450-4236-85fe-fe6924c43319" (UID: "30d7d777-4450-4236-85fe-fe6924c43319"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.273672 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/30d7d777-4450-4236-85fe-fe6924c43319-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.512824 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.513917 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.514530 4665 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.514900 4665 status_manager.go:851] "Failed to get status for pod" podUID="30d7d777-4450-4236-85fe-fe6924c43319" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.515181 4665 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.576133 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.576218 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: 
\"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.576324 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.576528 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.576528 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.576588 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.677609 4665 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.677640 4665 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.677650 4665 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.811393 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.812425 4665 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19" exitCode=0 Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.812540 4665 scope.go:117] "RemoveContainer" containerID="071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.812585 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.816795 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"30d7d777-4450-4236-85fe-fe6924c43319","Type":"ContainerDied","Data":"c52782dda738f42e34ca4b651f9266438dfc341987e172ac6d565d56c40f9915"} Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.816994 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c52782dda738f42e34ca4b651f9266438dfc341987e172ac6d565d56c40f9915" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.816882 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.829608 4665 scope.go:117] "RemoveContainer" containerID="25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.844017 4665 status_manager.go:851] "Failed to get status for pod" podUID="30d7d777-4450-4236-85fe-fe6924c43319" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.844695 4665 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.844963 4665 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.849915 4665 scope.go:117] "RemoveContainer" containerID="ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.851771 4665 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.852166 4665 status_manager.go:851] "Failed to get status for pod" podUID="30d7d777-4450-4236-85fe-fe6924c43319" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.852478 4665 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:23 crc 
kubenswrapper[4665]: I1205 01:14:23.869105 4665 scope.go:117] "RemoveContainer" containerID="d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.898754 4665 scope.go:117] "RemoveContainer" containerID="00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.918494 4665 scope.go:117] "RemoveContainer" containerID="273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.939173 4665 scope.go:117] "RemoveContainer" containerID="071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420" Dec 05 01:14:23 crc kubenswrapper[4665]: E1205 01:14:23.939669 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\": container with ID starting with 071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420 not found: ID does not exist" containerID="071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.939728 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420"} err="failed to get container status \"071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\": rpc error: code = NotFound desc = could not find container \"071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420\": container with ID starting with 071b5e498c0e2cca78dbf321dadcf7d9600c36db88bbc106d127edb7060e7420 not found: ID does not exist" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.939759 4665 scope.go:117] "RemoveContainer" containerID="25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f" Dec 05 01:14:23 crc kubenswrapper[4665]: E1205 01:14:23.940101 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\": container with ID starting with 25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f not found: ID does not exist" containerID="25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.940141 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f"} err="failed to get container status \"25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\": rpc error: code = NotFound desc = could not find container \"25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f\": container with ID starting with 25c5bae8f6c5851e56d2cc7e15e0d64a47d30a49efaa27f2826e610965e4ae0f not found: ID does not exist" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.940176 4665 scope.go:117] "RemoveContainer" containerID="ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad" Dec 05 01:14:23 crc kubenswrapper[4665]: E1205 01:14:23.940552 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\": container with ID starting with ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad not found: ID does not 
exist" containerID="ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.940592 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad"} err="failed to get container status \"ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\": rpc error: code = NotFound desc = could not find container \"ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad\": container with ID starting with ba8209a1d264a5ac2cfc927bd116467e9e259acf1dbad90123c49902182e95ad not found: ID does not exist" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.940624 4665 scope.go:117] "RemoveContainer" containerID="d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba" Dec 05 01:14:23 crc kubenswrapper[4665]: E1205 01:14:23.940893 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\": container with ID starting with d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba not found: ID does not exist" containerID="d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.940922 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba"} err="failed to get container status \"d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\": rpc error: code = NotFound desc = could not find container \"d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba\": container with ID starting with d449c84d04a9bf15b1d39dd43d723c711188999282b2a46f2f7e4d668a923bba not found: ID does not exist" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.940942 4665 scope.go:117] "RemoveContainer" containerID="00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19" Dec 05 01:14:23 crc kubenswrapper[4665]: E1205 01:14:23.941229 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\": container with ID starting with 00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19 not found: ID does not exist" containerID="00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.941274 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19"} err="failed to get container status \"00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\": rpc error: code = NotFound desc = could not find container \"00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19\": container with ID starting with 00467dc96fc6830fdadd64e23371a24f4976fe52d04489d943ef3d84e220ca19 not found: ID does not exist" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.941337 4665 scope.go:117] "RemoveContainer" containerID="273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff" Dec 05 01:14:23 crc kubenswrapper[4665]: E1205 01:14:23.941648 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\": container with ID starting with 273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff not found: ID does not exist" containerID="273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff" Dec 05 01:14:23 crc kubenswrapper[4665]: I1205 01:14:23.941707 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff"} err="failed to get container status \"273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\": rpc error: code = NotFound desc = could not find container \"273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff\": container with ID starting with 273b6920922fddce6bbb5c455e0a927c21c2c0b6497957ad902cb842a3c1b5ff not found: ID does not exist" Dec 05 01:14:24 crc kubenswrapper[4665]: E1205 01:14:24.394233 4665 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.236:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187e2cb56196313a openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 01:14:21.513748794 +0000 UTC m=+236.853141093,LastTimestamp:2025-12-05 01:14:21.513748794 +0000 UTC m=+236.853141093,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 01:14:24 crc kubenswrapper[4665]: I1205 01:14:24.905459 4665 status_manager.go:851] "Failed to get status for pod" podUID="30d7d777-4450-4236-85fe-fe6924c43319" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:24 crc kubenswrapper[4665]: I1205 01:14:24.905919 4665 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:24 crc kubenswrapper[4665]: I1205 01:14:24.906085 4665 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:24 crc kubenswrapper[4665]: I1205 01:14:24.907789 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 05 01:14:29 crc kubenswrapper[4665]: 
E1205 01:14:29.591195 4665 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:29 crc kubenswrapper[4665]: E1205 01:14:29.591976 4665 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:29 crc kubenswrapper[4665]: E1205 01:14:29.592477 4665 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:29 crc kubenswrapper[4665]: E1205 01:14:29.592763 4665 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:29 crc kubenswrapper[4665]: E1205 01:14:29.593020 4665 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:29 crc kubenswrapper[4665]: I1205 01:14:29.593053 4665 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 05 01:14:29 crc kubenswrapper[4665]: E1205 01:14:29.593337 4665 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.236:6443: connect: connection refused" interval="200ms" Dec 05 01:14:29 crc kubenswrapper[4665]: E1205 01:14:29.793630 4665 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.236:6443: connect: connection refused" interval="400ms" Dec 05 01:14:30 crc kubenswrapper[4665]: E1205 01:14:30.194398 4665 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.236:6443: connect: connection refused" interval="800ms" Dec 05 01:14:30 crc kubenswrapper[4665]: E1205 01:14:30.995214 4665 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.236:6443: connect: connection refused" interval="1.6s" Dec 05 01:14:31 crc kubenswrapper[4665]: E1205 01:14:31.039812 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:14:31Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:14:31Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:14:31Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T01:14:31Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:31 crc kubenswrapper[4665]: E1205 01:14:31.040631 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:31 crc kubenswrapper[4665]: E1205 01:14:31.041113 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:31 crc kubenswrapper[4665]: E1205 01:14:31.041651 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:31 crc kubenswrapper[4665]: E1205 01:14:31.042967 4665 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:31 crc kubenswrapper[4665]: E1205 01:14:31.042992 4665 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 01:14:31 crc kubenswrapper[4665]: I1205 01:14:31.892951 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:14:31 crc kubenswrapper[4665]: I1205 01:14:31.896143 4665 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:31 crc kubenswrapper[4665]: I1205 01:14:31.896494 4665 status_manager.go:851] "Failed to get status for pod" podUID="30d7d777-4450-4236-85fe-fe6924c43319" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:31 crc kubenswrapper[4665]: I1205 01:14:31.910852 4665 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="31e34d2b-42ec-4356-bd31-56ae869e58b0" Dec 05 01:14:31 crc kubenswrapper[4665]: I1205 01:14:31.910883 4665 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="31e34d2b-42ec-4356-bd31-56ae869e58b0" Dec 05 01:14:31 crc kubenswrapper[4665]: E1205 01:14:31.911254 4665 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.236:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:14:31 crc kubenswrapper[4665]: I1205 01:14:31.911826 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:14:32 crc kubenswrapper[4665]: E1205 01:14:32.596436 4665 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.236:6443: connect: connection refused" interval="3.2s" Dec 05 01:14:32 crc kubenswrapper[4665]: I1205 01:14:32.877097 4665 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="34fc01393f0902ca9510cbe65ff30343793d5512199a9962ed6d3740a9831f17" exitCode=0 Dec 05 01:14:32 crc kubenswrapper[4665]: I1205 01:14:32.877142 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"34fc01393f0902ca9510cbe65ff30343793d5512199a9962ed6d3740a9831f17"} Dec 05 01:14:32 crc kubenswrapper[4665]: I1205 01:14:32.877168 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"4f5b0e31dd85f4bfa2e762b0212efc6a631affc94d70d20c337740fc4d316977"} Dec 05 01:14:32 crc kubenswrapper[4665]: I1205 01:14:32.877414 4665 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="31e34d2b-42ec-4356-bd31-56ae869e58b0" Dec 05 01:14:32 crc kubenswrapper[4665]: I1205 01:14:32.877426 4665 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="31e34d2b-42ec-4356-bd31-56ae869e58b0" Dec 05 01:14:32 crc kubenswrapper[4665]: E1205 01:14:32.877719 4665 mirror_client.go:138] "Failed 
deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.236:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:14:32 crc kubenswrapper[4665]: I1205 01:14:32.877724 4665 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:32 crc kubenswrapper[4665]: I1205 01:14:32.878077 4665 status_manager.go:851] "Failed to get status for pod" podUID="30d7d777-4450-4236-85fe-fe6924c43319" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.236:6443: connect: connection refused" Dec 05 01:14:32 crc kubenswrapper[4665]: E1205 01:14:32.959001 4665 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.236:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" volumeName="registry-storage" Dec 05 01:14:33 crc kubenswrapper[4665]: I1205 01:14:33.885657 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"2825b07cf7cae24bcf01f80caca45aa13407658c9dd9c1d49609e7f61d10ac43"} Dec 05 01:14:33 crc kubenswrapper[4665]: I1205 01:14:33.885921 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"78ca4591eba48dcf28f560de2a28fabb5ae9531436e7586596f5bc566e148853"} Dec 05 01:14:33 crc kubenswrapper[4665]: I1205 01:14:33.885932 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"bb6a9ab631feebadd6c26c7eb82aa9c3a6321a569b8ff69b92f1bfa1b1097582"} Dec 05 01:14:33 crc kubenswrapper[4665]: I1205 01:14:33.885939 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"9c81f97829ee3eed3a6034c1d71b8588de53240d1d0a629fdbcea1aead6f2325"} Dec 05 01:14:33 crc kubenswrapper[4665]: I1205 01:14:33.885948 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"44aa94f62a6c8f7a9fac4e973671742af1981fd2201e7a7d539b70c5e1eab31d"} Dec 05 01:14:33 crc kubenswrapper[4665]: I1205 01:14:33.886176 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:14:33 crc kubenswrapper[4665]: I1205 01:14:33.886200 4665 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="31e34d2b-42ec-4356-bd31-56ae869e58b0" 
Dec 05 01:14:33 crc kubenswrapper[4665]: I1205 01:14:33.886214 4665 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="31e34d2b-42ec-4356-bd31-56ae869e58b0" Dec 05 01:14:35 crc kubenswrapper[4665]: I1205 01:14:35.897095 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 01:14:35 crc kubenswrapper[4665]: I1205 01:14:35.898161 4665 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521" exitCode=1 Dec 05 01:14:35 crc kubenswrapper[4665]: I1205 01:14:35.898236 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521"} Dec 05 01:14:35 crc kubenswrapper[4665]: I1205 01:14:35.899127 4665 scope.go:117] "RemoveContainer" containerID="5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.100233 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" podUID="29ef4f10-a6b6-4551-8067-0a82efc5651d" containerName="oauth-openshift" containerID="cri-o://43ccccc0615602940255c212d6080f168f016910a0f52b442cc2ea5bd8b17d6f" gracePeriod=15 Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.455334 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.547407 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-router-certs\") pod \"29ef4f10-a6b6-4551-8067-0a82efc5651d\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.547480 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-user-template-error\") pod \"29ef4f10-a6b6-4551-8067-0a82efc5651d\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.547505 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/29ef4f10-a6b6-4551-8067-0a82efc5651d-audit-policies\") pod \"29ef4f10-a6b6-4551-8067-0a82efc5651d\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.547532 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/29ef4f10-a6b6-4551-8067-0a82efc5651d-audit-dir\") pod \"29ef4f10-a6b6-4551-8067-0a82efc5651d\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.547572 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-trusted-ca-bundle\") pod \"29ef4f10-a6b6-4551-8067-0a82efc5651d\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.547588 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-user-template-provider-selection\") pod \"29ef4f10-a6b6-4551-8067-0a82efc5651d\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.547606 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-cliconfig\") pod \"29ef4f10-a6b6-4551-8067-0a82efc5651d\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.547642 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jq98p\" (UniqueName: \"kubernetes.io/projected/29ef4f10-a6b6-4551-8067-0a82efc5651d-kube-api-access-jq98p\") pod \"29ef4f10-a6b6-4551-8067-0a82efc5651d\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.547657 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-user-template-login\") pod \"29ef4f10-a6b6-4551-8067-0a82efc5651d\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.547677 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-service-ca\") pod \"29ef4f10-a6b6-4551-8067-0a82efc5651d\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.547692 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-ocp-branding-template\") pod \"29ef4f10-a6b6-4551-8067-0a82efc5651d\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.547725 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-serving-cert\") pod \"29ef4f10-a6b6-4551-8067-0a82efc5651d\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.547748 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-session\") pod \"29ef4f10-a6b6-4551-8067-0a82efc5651d\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.547770 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-user-idp-0-file-data\") pod \"29ef4f10-a6b6-4551-8067-0a82efc5651d\" (UID: \"29ef4f10-a6b6-4551-8067-0a82efc5651d\") " Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.548948 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "29ef4f10-a6b6-4551-8067-0a82efc5651d" (UID: "29ef4f10-a6b6-4551-8067-0a82efc5651d"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.549054 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "29ef4f10-a6b6-4551-8067-0a82efc5651d" (UID: "29ef4f10-a6b6-4551-8067-0a82efc5651d"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.549106 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/29ef4f10-a6b6-4551-8067-0a82efc5651d-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "29ef4f10-a6b6-4551-8067-0a82efc5651d" (UID: "29ef4f10-a6b6-4551-8067-0a82efc5651d"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.549713 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "29ef4f10-a6b6-4551-8067-0a82efc5651d" (UID: "29ef4f10-a6b6-4551-8067-0a82efc5651d"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.554227 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29ef4f10-a6b6-4551-8067-0a82efc5651d-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "29ef4f10-a6b6-4551-8067-0a82efc5651d" (UID: "29ef4f10-a6b6-4551-8067-0a82efc5651d"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.555404 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29ef4f10-a6b6-4551-8067-0a82efc5651d-kube-api-access-jq98p" (OuterVolumeSpecName: "kube-api-access-jq98p") pod "29ef4f10-a6b6-4551-8067-0a82efc5651d" (UID: "29ef4f10-a6b6-4551-8067-0a82efc5651d"). InnerVolumeSpecName "kube-api-access-jq98p". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.555635 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "29ef4f10-a6b6-4551-8067-0a82efc5651d" (UID: "29ef4f10-a6b6-4551-8067-0a82efc5651d"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.556539 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "29ef4f10-a6b6-4551-8067-0a82efc5651d" (UID: "29ef4f10-a6b6-4551-8067-0a82efc5651d"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.556787 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "29ef4f10-a6b6-4551-8067-0a82efc5651d" (UID: "29ef4f10-a6b6-4551-8067-0a82efc5651d"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.558629 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "29ef4f10-a6b6-4551-8067-0a82efc5651d" (UID: "29ef4f10-a6b6-4551-8067-0a82efc5651d"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.559183 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "29ef4f10-a6b6-4551-8067-0a82efc5651d" (UID: "29ef4f10-a6b6-4551-8067-0a82efc5651d"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.559677 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "29ef4f10-a6b6-4551-8067-0a82efc5651d" (UID: "29ef4f10-a6b6-4551-8067-0a82efc5651d"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.559805 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "29ef4f10-a6b6-4551-8067-0a82efc5651d" (UID: "29ef4f10-a6b6-4551-8067-0a82efc5651d"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.559965 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "29ef4f10-a6b6-4551-8067-0a82efc5651d" (UID: "29ef4f10-a6b6-4551-8067-0a82efc5651d"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.649173 4665 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.649468 4665 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.649572 4665 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.649655 4665 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.649719 4665 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.649795 4665 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.649881 4665 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.650037 4665 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.650116 4665 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/29ef4f10-a6b6-4551-8067-0a82efc5651d-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.650188 4665 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/29ef4f10-a6b6-4551-8067-0a82efc5651d-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.650262 4665 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.650349 4665 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.650411 4665 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/29ef4f10-a6b6-4551-8067-0a82efc5651d-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.650483 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jq98p\" (UniqueName: \"kubernetes.io/projected/29ef4f10-a6b6-4551-8067-0a82efc5651d-kube-api-access-jq98p\") on node \"crc\" DevicePath \"\"" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.721990 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.904013 4665 generic.go:334] "Generic (PLEG): container finished" podID="29ef4f10-a6b6-4551-8067-0a82efc5651d" containerID="43ccccc0615602940255c212d6080f168f016910a0f52b442cc2ea5bd8b17d6f" exitCode=0 Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.904062 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.904805 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" event={"ID":"29ef4f10-a6b6-4551-8067-0a82efc5651d","Type":"ContainerDied","Data":"43ccccc0615602940255c212d6080f168f016910a0f52b442cc2ea5bd8b17d6f"} Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.904885 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-6bhrf" event={"ID":"29ef4f10-a6b6-4551-8067-0a82efc5651d","Type":"ContainerDied","Data":"e0b75f55080154ed48257d2fd0948ef4064971098686ba87740ab7a4324715c3"} Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.904907 4665 scope.go:117] "RemoveContainer" containerID="43ccccc0615602940255c212d6080f168f016910a0f52b442cc2ea5bd8b17d6f" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.907799 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.907835 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b008801fb1e1ba75a469cebda222d1da30b918780324bac681cd78fd9d8a798a"} Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.912231 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.912278 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.920925 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.931698 4665 scope.go:117] "RemoveContainer" containerID="43ccccc0615602940255c212d6080f168f016910a0f52b442cc2ea5bd8b17d6f" Dec 05 
01:14:36 crc kubenswrapper[4665]: E1205 01:14:36.932099 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"43ccccc0615602940255c212d6080f168f016910a0f52b442cc2ea5bd8b17d6f\": container with ID starting with 43ccccc0615602940255c212d6080f168f016910a0f52b442cc2ea5bd8b17d6f not found: ID does not exist" containerID="43ccccc0615602940255c212d6080f168f016910a0f52b442cc2ea5bd8b17d6f" Dec 05 01:14:36 crc kubenswrapper[4665]: I1205 01:14:36.932135 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"43ccccc0615602940255c212d6080f168f016910a0f52b442cc2ea5bd8b17d6f"} err="failed to get container status \"43ccccc0615602940255c212d6080f168f016910a0f52b442cc2ea5bd8b17d6f\": rpc error: code = NotFound desc = could not find container \"43ccccc0615602940255c212d6080f168f016910a0f52b442cc2ea5bd8b17d6f\": container with ID starting with 43ccccc0615602940255c212d6080f168f016910a0f52b442cc2ea5bd8b17d6f not found: ID does not exist" Dec 05 01:14:39 crc kubenswrapper[4665]: I1205 01:14:39.611304 4665 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:14:39 crc kubenswrapper[4665]: I1205 01:14:39.800701 4665 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="183a9881-99c4-4d6d-85a3-99834953b2f3" Dec 05 01:14:39 crc kubenswrapper[4665]: I1205 01:14:39.921429 4665 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="31e34d2b-42ec-4356-bd31-56ae869e58b0" Dec 05 01:14:39 crc kubenswrapper[4665]: I1205 01:14:39.921457 4665 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="31e34d2b-42ec-4356-bd31-56ae869e58b0" Dec 05 01:14:39 crc kubenswrapper[4665]: I1205 01:14:39.924302 4665 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="183a9881-99c4-4d6d-85a3-99834953b2f3" Dec 05 01:14:41 crc kubenswrapper[4665]: I1205 01:14:41.165881 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 01:14:41 crc kubenswrapper[4665]: I1205 01:14:41.166504 4665 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 05 01:14:41 crc kubenswrapper[4665]: I1205 01:14:41.166564 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 05 01:14:46 crc kubenswrapper[4665]: I1205 01:14:46.103000 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 05 01:14:46 crc kubenswrapper[4665]: I1205 01:14:46.681619 4665 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 
05 01:14:46 crc kubenswrapper[4665]: I1205 01:14:46.721675 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 01:14:47 crc kubenswrapper[4665]: I1205 01:14:47.120986 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 05 01:14:47 crc kubenswrapper[4665]: I1205 01:14:47.510560 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 05 01:14:47 crc kubenswrapper[4665]: I1205 01:14:47.513675 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 05 01:14:48 crc kubenswrapper[4665]: I1205 01:14:48.056533 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 05 01:14:48 crc kubenswrapper[4665]: I1205 01:14:48.191656 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 05 01:14:48 crc kubenswrapper[4665]: I1205 01:14:48.291558 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 05 01:14:48 crc kubenswrapper[4665]: I1205 01:14:48.388783 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 05 01:14:48 crc kubenswrapper[4665]: I1205 01:14:48.704995 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 05 01:14:48 crc kubenswrapper[4665]: I1205 01:14:48.778783 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 05 01:14:48 crc kubenswrapper[4665]: I1205 01:14:48.801593 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 05 01:14:49 crc kubenswrapper[4665]: I1205 01:14:49.494004 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 05 01:14:50 crc kubenswrapper[4665]: I1205 01:14:50.900058 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 05 01:14:51 crc kubenswrapper[4665]: I1205 01:14:51.130935 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 05 01:14:51 crc kubenswrapper[4665]: I1205 01:14:51.166460 4665 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 05 01:14:51 crc kubenswrapper[4665]: I1205 01:14:51.166520 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 05 01:14:51 crc kubenswrapper[4665]: I1205 
01:14:51.193094 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 01:14:51 crc kubenswrapper[4665]: I1205 01:14:51.250872 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 05 01:14:51 crc kubenswrapper[4665]: I1205 01:14:51.331962 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 05 01:14:51 crc kubenswrapper[4665]: I1205 01:14:51.332806 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 05 01:14:51 crc kubenswrapper[4665]: I1205 01:14:51.409824 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 05 01:14:51 crc kubenswrapper[4665]: I1205 01:14:51.480964 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 05 01:14:51 crc kubenswrapper[4665]: I1205 01:14:51.903731 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 05 01:14:51 crc kubenswrapper[4665]: I1205 01:14:51.939672 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 05 01:14:51 crc kubenswrapper[4665]: I1205 01:14:51.997880 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 05 01:14:52 crc kubenswrapper[4665]: I1205 01:14:52.407958 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 05 01:14:52 crc kubenswrapper[4665]: I1205 01:14:52.696599 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 05 01:14:52 crc kubenswrapper[4665]: I1205 01:14:52.812479 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 05 01:14:52 crc kubenswrapper[4665]: I1205 01:14:52.871001 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 05 01:14:53 crc kubenswrapper[4665]: I1205 01:14:53.002948 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 05 01:14:53 crc kubenswrapper[4665]: I1205 01:14:53.156664 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 05 01:14:53 crc kubenswrapper[4665]: I1205 01:14:53.581525 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 05 01:14:53 crc kubenswrapper[4665]: I1205 01:14:53.605126 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 05 01:14:54 crc kubenswrapper[4665]: I1205 01:14:54.137652 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 01:14:54 crc kubenswrapper[4665]: I1205 01:14:54.261812 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 01:14:54 crc kubenswrapper[4665]: I1205 01:14:54.493041 
4665 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 05 01:14:54 crc kubenswrapper[4665]: I1205 01:14:54.698750 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 05 01:14:54 crc kubenswrapper[4665]: I1205 01:14:54.931073 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 05 01:14:54 crc kubenswrapper[4665]: I1205 01:14:54.932638 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 05 01:14:54 crc kubenswrapper[4665]: I1205 01:14:54.948000 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 05 01:14:54 crc kubenswrapper[4665]: I1205 01:14:54.977702 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 05 01:14:55 crc kubenswrapper[4665]: I1205 01:14:55.188925 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 05 01:14:55 crc kubenswrapper[4665]: I1205 01:14:55.216582 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 05 01:14:55 crc kubenswrapper[4665]: I1205 01:14:55.219566 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 01:14:55 crc kubenswrapper[4665]: I1205 01:14:55.250859 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 05 01:14:55 crc kubenswrapper[4665]: I1205 01:14:55.315393 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 05 01:14:55 crc kubenswrapper[4665]: I1205 01:14:55.413097 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 05 01:14:55 crc kubenswrapper[4665]: I1205 01:14:55.508107 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 05 01:14:55 crc kubenswrapper[4665]: I1205 01:14:55.573777 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 05 01:14:55 crc kubenswrapper[4665]: I1205 01:14:55.657113 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 05 01:14:55 crc kubenswrapper[4665]: I1205 01:14:55.752091 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 05 01:14:55 crc kubenswrapper[4665]: I1205 01:14:55.787973 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 05 01:14:55 crc kubenswrapper[4665]: I1205 01:14:55.813518 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 05 01:14:55 crc kubenswrapper[4665]: I1205 01:14:55.814584 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 05 01:14:55 crc kubenswrapper[4665]: I1205 
01:14:55.831985 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 01:14:55 crc kubenswrapper[4665]: I1205 01:14:55.876024 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 05 01:14:55 crc kubenswrapper[4665]: I1205 01:14:55.942523 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.106360 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.151907 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.218633 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.272440 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.295878 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.438766 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.641185 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.642026 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.709529 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.716428 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.739340 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.777251 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.892455 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.903708 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.970822 4665 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.975552 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=35.975524915 
podStartE2EDuration="35.975524915s" podCreationTimestamp="2025-12-05 01:14:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:14:39.634720847 +0000 UTC m=+254.974113176" watchObservedRunningTime="2025-12-05 01:14:56.975524915 +0000 UTC m=+272.314917244" Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.976383 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.978761 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-6bhrf","openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.978842 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-657494565c-7c5dz","openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 01:14:56 crc kubenswrapper[4665]: E1205 01:14:56.979201 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30d7d777-4450-4236-85fe-fe6924c43319" containerName="installer" Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.979247 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="30d7d777-4450-4236-85fe-fe6924c43319" containerName="installer" Dec 05 01:14:56 crc kubenswrapper[4665]: E1205 01:14:56.979329 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29ef4f10-a6b6-4551-8067-0a82efc5651d" containerName="oauth-openshift" Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.979352 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="29ef4f10-a6b6-4551-8067-0a82efc5651d" containerName="oauth-openshift" Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.979243 4665 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="31e34d2b-42ec-4356-bd31-56ae869e58b0" Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.979466 4665 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="31e34d2b-42ec-4356-bd31-56ae869e58b0" Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.979639 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="29ef4f10-a6b6-4551-8067-0a82efc5651d" containerName="oauth-openshift" Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.979677 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="30d7d777-4450-4236-85fe-fe6924c43319" containerName="installer" Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.983585 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:56 crc kubenswrapper[4665]: I1205 01:14:56.994061 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.000223 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.004414 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.004719 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.005191 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.005514 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.014680 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.015159 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.015721 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.016059 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.016426 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.017672 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.021951 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.022451 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.026865 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.034714 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.037465 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.046378 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.055762 
4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=18.055746622 podStartE2EDuration="18.055746622s" podCreationTimestamp="2025-12-05 01:14:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:14:57.052692954 +0000 UTC m=+272.392085293" watchObservedRunningTime="2025-12-05 01:14:57.055746622 +0000 UTC m=+272.395138921" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.116176 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.116540 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.116576 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1d4ea275-598c-4551-8749-89ee929ef062-audit-dir\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.116596 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rsptl\" (UniqueName: \"kubernetes.io/projected/1d4ea275-598c-4551-8749-89ee929ef062-kube-api-access-rsptl\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.116623 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.116655 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1d4ea275-598c-4551-8749-89ee929ef062-audit-policies\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.116806 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.116865 4665 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-system-cliconfig\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.116899 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-system-router-certs\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.116928 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-system-service-ca\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.116973 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-system-serving-cert\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.117016 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-system-session\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.117054 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-user-template-login\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.117080 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-user-template-error\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.117136 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " 
pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.193548 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.218014 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.218067 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1d4ea275-598c-4551-8749-89ee929ef062-audit-dir\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.218095 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rsptl\" (UniqueName: \"kubernetes.io/projected/1d4ea275-598c-4551-8749-89ee929ef062-kube-api-access-rsptl\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.218139 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.218180 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1d4ea275-598c-4551-8749-89ee929ef062-audit-policies\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.218204 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.218225 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-system-cliconfig\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.218247 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-system-router-certs\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.218270 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-system-service-ca\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.218313 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-system-serving-cert\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.218338 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-system-session\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.218361 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-user-template-login\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.218382 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-user-template-error\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.218411 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.219403 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.219652 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-system-service-ca\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.220406 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-system-cliconfig\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.220687 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1d4ea275-598c-4551-8749-89ee929ef062-audit-dir\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.221094 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1d4ea275-598c-4551-8749-89ee929ef062-audit-policies\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.224231 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.224577 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-user-template-error\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.225947 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.226050 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-system-serving-cert\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.226140 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-system-router-certs\") pod 
\"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.226359 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.228654 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-system-session\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.229853 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.231843 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/1d4ea275-598c-4551-8749-89ee929ef062-v4-0-config-user-template-login\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.245552 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rsptl\" (UniqueName: \"kubernetes.io/projected/1d4ea275-598c-4551-8749-89ee929ef062-kube-api-access-rsptl\") pod \"oauth-openshift-657494565c-7c5dz\" (UID: \"1d4ea275-598c-4551-8749-89ee929ef062\") " pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.262700 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.298424 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.318906 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.464598 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.484837 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.527661 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.666908 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.739754 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.842487 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.869470 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.889169 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.939683 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 05 01:14:57 crc kubenswrapper[4665]: I1205 01:14:57.940440 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 05 01:14:58 crc kubenswrapper[4665]: I1205 01:14:58.064160 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 05 01:14:58 crc kubenswrapper[4665]: I1205 01:14:58.248961 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 05 01:14:58 crc kubenswrapper[4665]: I1205 01:14:58.258833 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 05 01:14:58 crc kubenswrapper[4665]: I1205 01:14:58.405387 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 05 01:14:58 crc kubenswrapper[4665]: I1205 01:14:58.529916 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 05 01:14:58 crc kubenswrapper[4665]: I1205 01:14:58.570881 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 05 01:14:58 crc kubenswrapper[4665]: I1205 01:14:58.576658 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 05 01:14:58 crc kubenswrapper[4665]: I1205 01:14:58.681279 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 05 01:14:58 crc kubenswrapper[4665]: I1205 01:14:58.683420 4665 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 05 01:14:58 crc kubenswrapper[4665]: I1205 01:14:58.689813 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 05 01:14:58 crc kubenswrapper[4665]: I1205 01:14:58.732524 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 05 01:14:58 crc kubenswrapper[4665]: I1205 01:14:58.742654 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 05 01:14:58 crc kubenswrapper[4665]: I1205 01:14:58.790125 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 05 01:14:58 crc kubenswrapper[4665]: I1205 01:14:58.900362 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29ef4f10-a6b6-4551-8067-0a82efc5651d" path="/var/lib/kubelet/pods/29ef4f10-a6b6-4551-8067-0a82efc5651d/volumes" Dec 05 01:14:59 crc kubenswrapper[4665]: I1205 01:14:59.018415 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 05 01:14:59 crc kubenswrapper[4665]: I1205 01:14:59.133268 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 05 01:14:59 crc kubenswrapper[4665]: I1205 01:14:59.226053 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 05 01:14:59 crc kubenswrapper[4665]: I1205 01:14:59.349137 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 05 01:14:59 crc kubenswrapper[4665]: I1205 01:14:59.402169 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 05 01:14:59 crc kubenswrapper[4665]: I1205 01:14:59.529930 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 05 01:14:59 crc kubenswrapper[4665]: I1205 01:14:59.541413 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 05 01:14:59 crc kubenswrapper[4665]: I1205 01:14:59.567649 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 05 01:14:59 crc kubenswrapper[4665]: I1205 01:14:59.705360 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 05 01:14:59 crc kubenswrapper[4665]: I1205 01:14:59.873396 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 05 01:14:59 crc kubenswrapper[4665]: I1205 01:14:59.951415 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 05 01:14:59 crc kubenswrapper[4665]: I1205 01:14:59.967259 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 05 01:15:00 crc kubenswrapper[4665]: I1205 01:15:00.059404 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 05 01:15:00 crc kubenswrapper[4665]: I1205 01:15:00.124584 4665 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 05 01:15:00 crc kubenswrapper[4665]: I1205 01:15:00.208521 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 05 01:15:00 crc kubenswrapper[4665]: I1205 01:15:00.209160 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 05 01:15:00 crc kubenswrapper[4665]: I1205 01:15:00.259448 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 05 01:15:00 crc kubenswrapper[4665]: I1205 01:15:00.291385 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 05 01:15:00 crc kubenswrapper[4665]: I1205 01:15:00.393253 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 05 01:15:00 crc kubenswrapper[4665]: I1205 01:15:00.462675 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 05 01:15:00 crc kubenswrapper[4665]: I1205 01:15:00.543527 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 05 01:15:00 crc kubenswrapper[4665]: I1205 01:15:00.545499 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 05 01:15:00 crc kubenswrapper[4665]: I1205 01:15:00.630093 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 05 01:15:00 crc kubenswrapper[4665]: I1205 01:15:00.731341 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 05 01:15:00 crc kubenswrapper[4665]: I1205 01:15:00.774270 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 05 01:15:00 crc kubenswrapper[4665]: I1205 01:15:00.787702 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 05 01:15:00 crc kubenswrapper[4665]: I1205 01:15:00.814128 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 05 01:15:00 crc kubenswrapper[4665]: I1205 01:15:00.987723 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.047902 4665 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.048810 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.067147 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.067470 4665 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.108718 4665 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.108974 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://993651b24686775ba568018170de138a447d76020f0c65e920229a5549cae820" gracePeriod=5 Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.116024 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.123124 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.132570 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.167017 4665 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.167069 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.167117 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.167824 4665 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-controller-manager" containerStatusID={"Type":"cri-o","ID":"b008801fb1e1ba75a469cebda222d1da30b918780324bac681cd78fd9d8a798a"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container kube-controller-manager failed startup probe, will be restarted" Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.167945 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" containerID="cri-o://b008801fb1e1ba75a469cebda222d1da30b918780324bac681cd78fd9d8a798a" gracePeriod=30 Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.284732 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.336904 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.476178 4665 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.490337 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.527722 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.538019 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.539275 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.658707 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.662222 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.713815 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.763142 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.858798 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-657494565c-7c5dz"] Dec 05 01:15:01 crc kubenswrapper[4665]: I1205 01:15:01.927589 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 05 01:15:02 crc kubenswrapper[4665]: I1205 01:15:02.003614 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 05 01:15:02 crc kubenswrapper[4665]: I1205 01:15:02.119407 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 01:15:02 crc kubenswrapper[4665]: I1205 01:15:02.156988 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 05 01:15:02 crc kubenswrapper[4665]: I1205 01:15:02.204236 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 05 01:15:02 crc kubenswrapper[4665]: I1205 01:15:02.219857 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 01:15:02 crc kubenswrapper[4665]: I1205 01:15:02.295673 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-657494565c-7c5dz"] Dec 05 01:15:02 crc kubenswrapper[4665]: I1205 01:15:02.311793 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 05 01:15:02 crc kubenswrapper[4665]: I1205 01:15:02.357632 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 05 01:15:02 crc kubenswrapper[4665]: I1205 01:15:02.561098 4665 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 05 01:15:02 crc kubenswrapper[4665]: I1205 01:15:02.564685 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 05 01:15:02 crc kubenswrapper[4665]: I1205 01:15:02.629394 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 05 01:15:02 crc kubenswrapper[4665]: I1205 01:15:02.659743 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 05 01:15:02 crc kubenswrapper[4665]: I1205 01:15:02.724714 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 05 01:15:02 crc kubenswrapper[4665]: I1205 01:15:02.775514 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 05 01:15:02 crc kubenswrapper[4665]: I1205 01:15:02.811870 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 05 01:15:02 crc kubenswrapper[4665]: I1205 01:15:02.812490 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 05 01:15:02 crc kubenswrapper[4665]: I1205 01:15:02.855608 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 05 01:15:02 crc kubenswrapper[4665]: I1205 01:15:02.859217 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 05 01:15:02 crc kubenswrapper[4665]: I1205 01:15:02.863686 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 05 01:15:02 crc kubenswrapper[4665]: I1205 01:15:02.904833 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 05 01:15:02 crc kubenswrapper[4665]: I1205 01:15:02.911791 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 05 01:15:02 crc kubenswrapper[4665]: I1205 01:15:02.930252 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.034355 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.053397 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.057493 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" event={"ID":"1d4ea275-598c-4551-8749-89ee929ef062","Type":"ContainerStarted","Data":"a8e8e9de3115f3d0f241946a08e59453880538bf3c0ff7ddf2ad014915d41040"} Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.057539 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" event={"ID":"1d4ea275-598c-4551-8749-89ee929ef062","Type":"ContainerStarted","Data":"92aa7b9abdcf61db3fa810152ce2fa3b83a47cfc9c6f53423e578734d0abacfd"} Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.057875 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.081188 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" podStartSLOduration=52.08116688 podStartE2EDuration="52.08116688s" podCreationTimestamp="2025-12-05 01:14:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:15:03.076913861 +0000 UTC m=+278.416306160" watchObservedRunningTime="2025-12-05 01:15:03.08116688 +0000 UTC m=+278.420559179" Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.087504 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.113586 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.121064 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.152137 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-657494565c-7c5dz" Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.164452 4665 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.256536 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.311387 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.428086 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.429852 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.576848 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.578639 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.597519 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.642527 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.701635 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.769481 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.788012 4665 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.799457 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.820453 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.879522 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.898730 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 05 01:15:03 crc kubenswrapper[4665]: I1205 01:15:03.918887 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 05 01:15:04 crc kubenswrapper[4665]: I1205 01:15:04.098482 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 05 01:15:04 crc kubenswrapper[4665]: I1205 01:15:04.132514 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 05 01:15:04 crc kubenswrapper[4665]: I1205 01:15:04.134264 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 05 01:15:04 crc kubenswrapper[4665]: I1205 01:15:04.162056 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 05 01:15:04 crc kubenswrapper[4665]: I1205 01:15:04.275644 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 05 01:15:04 crc kubenswrapper[4665]: I1205 01:15:04.303178 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 05 01:15:04 crc kubenswrapper[4665]: I1205 01:15:04.311675 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 05 01:15:04 crc kubenswrapper[4665]: I1205 01:15:04.401585 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 05 01:15:04 crc kubenswrapper[4665]: I1205 01:15:04.460768 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 05 01:15:04 crc kubenswrapper[4665]: I1205 01:15:04.646760 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 05 01:15:04 crc kubenswrapper[4665]: I1205 01:15:04.670615 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 05 01:15:04 crc kubenswrapper[4665]: I1205 01:15:04.673027 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 05 01:15:04 crc kubenswrapper[4665]: I1205 01:15:04.820220 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 01:15:04 crc 
kubenswrapper[4665]: I1205 01:15:04.848686 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 05 01:15:04 crc kubenswrapper[4665]: I1205 01:15:04.853695 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 05 01:15:04 crc kubenswrapper[4665]: I1205 01:15:04.907543 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 05 01:15:04 crc kubenswrapper[4665]: I1205 01:15:04.913605 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 05 01:15:04 crc kubenswrapper[4665]: I1205 01:15:04.920395 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 05 01:15:05 crc kubenswrapper[4665]: I1205 01:15:05.282143 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 01:15:05 crc kubenswrapper[4665]: I1205 01:15:05.387027 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 05 01:15:05 crc kubenswrapper[4665]: I1205 01:15:05.420976 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 05 01:15:05 crc kubenswrapper[4665]: I1205 01:15:05.501021 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 05 01:15:05 crc kubenswrapper[4665]: I1205 01:15:05.531014 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 05 01:15:05 crc kubenswrapper[4665]: I1205 01:15:05.669763 4665 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 05 01:15:05 crc kubenswrapper[4665]: I1205 01:15:05.791159 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 05 01:15:05 crc kubenswrapper[4665]: I1205 01:15:05.896753 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 05 01:15:05 crc kubenswrapper[4665]: I1205 01:15:05.939905 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.124522 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.162110 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.213177 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.282910 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.714480 4665 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.714592 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.790529 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.832877 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.833028 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.833047 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.833125 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.833189 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.833243 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.833185 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.833240 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.833366 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.833699 4665 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.833734 4665 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.833753 4665 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.833770 4665 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.847893 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.906388 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.907699 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.908896 4665 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.931969 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 01:15:06 crc kubenswrapper[4665]: I1205 01:15:06.932126 4665 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="0ebdc207-49d1-4262-8167-d938e0dc74b1" Dec 05 01:15:07 crc kubenswrapper[4665]: I1205 01:15:06.935065 4665 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 01:15:07 crc kubenswrapper[4665]: I1205 01:15:06.940529 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 01:15:07 crc kubenswrapper[4665]: I1205 01:15:06.940700 4665 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="0ebdc207-49d1-4262-8167-d938e0dc74b1" Dec 05 01:15:07 crc kubenswrapper[4665]: I1205 01:15:07.097248 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 01:15:07 crc kubenswrapper[4665]: I1205 01:15:07.097803 4665 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="993651b24686775ba568018170de138a447d76020f0c65e920229a5549cae820" exitCode=137 Dec 05 01:15:07 crc kubenswrapper[4665]: I1205 01:15:07.097909 4665 scope.go:117] "RemoveContainer" containerID="993651b24686775ba568018170de138a447d76020f0c65e920229a5549cae820" Dec 05 01:15:07 crc kubenswrapper[4665]: I1205 01:15:07.097972 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 01:15:07 crc kubenswrapper[4665]: I1205 01:15:07.118750 4665 scope.go:117] "RemoveContainer" containerID="993651b24686775ba568018170de138a447d76020f0c65e920229a5549cae820" Dec 05 01:15:07 crc kubenswrapper[4665]: E1205 01:15:07.119311 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"993651b24686775ba568018170de138a447d76020f0c65e920229a5549cae820\": container with ID starting with 993651b24686775ba568018170de138a447d76020f0c65e920229a5549cae820 not found: ID does not exist" containerID="993651b24686775ba568018170de138a447d76020f0c65e920229a5549cae820" Dec 05 01:15:07 crc kubenswrapper[4665]: I1205 01:15:07.119359 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"993651b24686775ba568018170de138a447d76020f0c65e920229a5549cae820"} err="failed to get container status \"993651b24686775ba568018170de138a447d76020f0c65e920229a5549cae820\": rpc error: code = NotFound desc = could not find container \"993651b24686775ba568018170de138a447d76020f0c65e920229a5549cae820\": container with ID starting with 993651b24686775ba568018170de138a447d76020f0c65e920229a5549cae820 not found: ID does not exist" Dec 05 01:15:07 crc kubenswrapper[4665]: I1205 01:15:07.256769 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 05 01:15:07 crc kubenswrapper[4665]: I1205 01:15:07.325435 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 05 01:15:07 crc kubenswrapper[4665]: I1205 01:15:07.338373 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 05 01:15:07 crc kubenswrapper[4665]: I1205 01:15:07.344830 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 05 01:15:07 crc kubenswrapper[4665]: I1205 01:15:07.460579 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 01:15:07 crc kubenswrapper[4665]: I1205 01:15:07.491642 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 05 01:15:07 crc kubenswrapper[4665]: I1205 01:15:07.537721 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 05 01:15:07 crc kubenswrapper[4665]: I1205 01:15:07.763804 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 05 01:15:08 crc kubenswrapper[4665]: I1205 01:15:08.080103 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 05 01:15:08 crc kubenswrapper[4665]: I1205 01:15:08.087824 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 05 01:15:08 crc kubenswrapper[4665]: I1205 01:15:08.270291 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 05 01:15:09 crc kubenswrapper[4665]: I1205 01:15:09.271680 4665 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 05 01:15:09 crc kubenswrapper[4665]: I1205 01:15:09.743981 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 05 01:15:24 crc kubenswrapper[4665]: I1205 01:15:24.789807 4665 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Dec 05 01:15:29 crc kubenswrapper[4665]: I1205 01:15:29.235054 4665 generic.go:334] "Generic (PLEG): container finished" podID="9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580" containerID="540f8a0062548c6c18d5e58fb57f2a1122bf8ca515ffd25804e473bcd88da918" exitCode=0 Dec 05 01:15:29 crc kubenswrapper[4665]: I1205 01:15:29.235185 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" event={"ID":"9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580","Type":"ContainerDied","Data":"540f8a0062548c6c18d5e58fb57f2a1122bf8ca515ffd25804e473bcd88da918"} Dec 05 01:15:29 crc kubenswrapper[4665]: I1205 01:15:29.236011 4665 scope.go:117] "RemoveContainer" containerID="540f8a0062548c6c18d5e58fb57f2a1122bf8ca515ffd25804e473bcd88da918" Dec 05 01:15:30 crc kubenswrapper[4665]: I1205 01:15:30.241616 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" event={"ID":"9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580","Type":"ContainerStarted","Data":"dc69b7f66187e71118730b508972eb6af8b4f45f73c411022ec3b3557ce5ba64"} Dec 05 01:15:30 crc kubenswrapper[4665]: I1205 01:15:30.243055 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" Dec 05 01:15:30 crc kubenswrapper[4665]: I1205 01:15:30.244396 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" Dec 05 01:15:31 crc kubenswrapper[4665]: I1205 01:15:31.248060 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 05 01:15:31 crc kubenswrapper[4665]: I1205 01:15:31.250339 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 01:15:31 crc kubenswrapper[4665]: I1205 01:15:31.250406 4665 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="b008801fb1e1ba75a469cebda222d1da30b918780324bac681cd78fd9d8a798a" exitCode=137 Dec 05 01:15:31 crc kubenswrapper[4665]: I1205 01:15:31.250677 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"b008801fb1e1ba75a469cebda222d1da30b918780324bac681cd78fd9d8a798a"} Dec 05 01:15:31 crc kubenswrapper[4665]: I1205 01:15:31.250734 4665 scope.go:117] "RemoveContainer" containerID="5db7bfc9c25fdbc574a11bc46df53f84808956dc03a5b69cec104018377e3521" Dec 05 01:15:32 crc kubenswrapper[4665]: I1205 01:15:32.259243 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Dec 05 01:15:32 crc kubenswrapper[4665]: I1205 01:15:32.262998 4665 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"91a6ab43deae7c4ab6bfc13db6cccf7139040f02db8ddfaf418f206d3b5cb3ea"} Dec 05 01:15:36 crc kubenswrapper[4665]: I1205 01:15:36.721661 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 01:15:41 crc kubenswrapper[4665]: I1205 01:15:41.166706 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 01:15:41 crc kubenswrapper[4665]: I1205 01:15:41.171486 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 01:15:41 crc kubenswrapper[4665]: I1205 01:15:41.323327 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 01:15:46 crc kubenswrapper[4665]: I1205 01:15:46.134365 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-v24sw"] Dec 05 01:15:46 crc kubenswrapper[4665]: I1205 01:15:46.135038 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-v24sw" podUID="2494dbd6-0990-4ec2-9338-f0ef366f13f5" containerName="registry-server" containerID="cri-o://377830bc3883021c17159e5aa9a0221a0c7d68f8b18acfd84a71b663530a49e5" gracePeriod=2 Dec 05 01:15:46 crc kubenswrapper[4665]: I1205 01:15:46.346615 4665 generic.go:334] "Generic (PLEG): container finished" podID="2494dbd6-0990-4ec2-9338-f0ef366f13f5" containerID="377830bc3883021c17159e5aa9a0221a0c7d68f8b18acfd84a71b663530a49e5" exitCode=0 Dec 05 01:15:46 crc kubenswrapper[4665]: I1205 01:15:46.346664 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v24sw" event={"ID":"2494dbd6-0990-4ec2-9338-f0ef366f13f5","Type":"ContainerDied","Data":"377830bc3883021c17159e5aa9a0221a0c7d68f8b18acfd84a71b663530a49e5"} Dec 05 01:15:46 crc kubenswrapper[4665]: I1205 01:15:46.443246 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-v24sw" Dec 05 01:15:46 crc kubenswrapper[4665]: I1205 01:15:46.495482 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8bfw\" (UniqueName: \"kubernetes.io/projected/2494dbd6-0990-4ec2-9338-f0ef366f13f5-kube-api-access-x8bfw\") pod \"2494dbd6-0990-4ec2-9338-f0ef366f13f5\" (UID: \"2494dbd6-0990-4ec2-9338-f0ef366f13f5\") " Dec 05 01:15:46 crc kubenswrapper[4665]: I1205 01:15:46.496515 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2494dbd6-0990-4ec2-9338-f0ef366f13f5-utilities\") pod \"2494dbd6-0990-4ec2-9338-f0ef366f13f5\" (UID: \"2494dbd6-0990-4ec2-9338-f0ef366f13f5\") " Dec 05 01:15:46 crc kubenswrapper[4665]: I1205 01:15:46.496553 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2494dbd6-0990-4ec2-9338-f0ef366f13f5-catalog-content\") pod \"2494dbd6-0990-4ec2-9338-f0ef366f13f5\" (UID: \"2494dbd6-0990-4ec2-9338-f0ef366f13f5\") " Dec 05 01:15:46 crc kubenswrapper[4665]: I1205 01:15:46.497242 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2494dbd6-0990-4ec2-9338-f0ef366f13f5-utilities" (OuterVolumeSpecName: "utilities") pod "2494dbd6-0990-4ec2-9338-f0ef366f13f5" (UID: "2494dbd6-0990-4ec2-9338-f0ef366f13f5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:15:46 crc kubenswrapper[4665]: I1205 01:15:46.508542 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2494dbd6-0990-4ec2-9338-f0ef366f13f5-kube-api-access-x8bfw" (OuterVolumeSpecName: "kube-api-access-x8bfw") pod "2494dbd6-0990-4ec2-9338-f0ef366f13f5" (UID: "2494dbd6-0990-4ec2-9338-f0ef366f13f5"). InnerVolumeSpecName "kube-api-access-x8bfw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:15:46 crc kubenswrapper[4665]: I1205 01:15:46.549022 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2494dbd6-0990-4ec2-9338-f0ef366f13f5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2494dbd6-0990-4ec2-9338-f0ef366f13f5" (UID: "2494dbd6-0990-4ec2-9338-f0ef366f13f5"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:15:46 crc kubenswrapper[4665]: I1205 01:15:46.597885 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8bfw\" (UniqueName: \"kubernetes.io/projected/2494dbd6-0990-4ec2-9338-f0ef366f13f5-kube-api-access-x8bfw\") on node \"crc\" DevicePath \"\"" Dec 05 01:15:46 crc kubenswrapper[4665]: I1205 01:15:46.597931 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2494dbd6-0990-4ec2-9338-f0ef366f13f5-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 01:15:46 crc kubenswrapper[4665]: I1205 01:15:46.597944 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2494dbd6-0990-4ec2-9338-f0ef366f13f5-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 01:15:47 crc kubenswrapper[4665]: I1205 01:15:47.353237 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-v24sw" event={"ID":"2494dbd6-0990-4ec2-9338-f0ef366f13f5","Type":"ContainerDied","Data":"5f7bb0aeed4763c93401f179efd53ab48fd2640a102fc1c0692ced0212c65681"} Dec 05 01:15:47 crc kubenswrapper[4665]: I1205 01:15:47.353282 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-v24sw" Dec 05 01:15:47 crc kubenswrapper[4665]: I1205 01:15:47.353317 4665 scope.go:117] "RemoveContainer" containerID="377830bc3883021c17159e5aa9a0221a0c7d68f8b18acfd84a71b663530a49e5" Dec 05 01:15:47 crc kubenswrapper[4665]: I1205 01:15:47.370543 4665 scope.go:117] "RemoveContainer" containerID="b3c46f8b48c4a0bbfb8ce768aba048b4191215a5aa800a979520c88d154d2f30" Dec 05 01:15:47 crc kubenswrapper[4665]: I1205 01:15:47.373686 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-v24sw"] Dec 05 01:15:47 crc kubenswrapper[4665]: I1205 01:15:47.377064 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-v24sw"] Dec 05 01:15:47 crc kubenswrapper[4665]: I1205 01:15:47.389071 4665 scope.go:117] "RemoveContainer" containerID="879b9912bc24b6363df29eddc5c0f4043ccb698ac7783325aec8e030ed00d8ff" Dec 05 01:15:48 crc kubenswrapper[4665]: I1205 01:15:48.900826 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2494dbd6-0990-4ec2-9338-f0ef366f13f5" path="/var/lib/kubelet/pods/2494dbd6-0990-4ec2-9338-f0ef366f13f5/volumes" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.228828 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414955-2klss"] Dec 05 01:15:49 crc kubenswrapper[4665]: E1205 01:15:49.229134 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2494dbd6-0990-4ec2-9338-f0ef366f13f5" containerName="extract-utilities" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.229152 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="2494dbd6-0990-4ec2-9338-f0ef366f13f5" containerName="extract-utilities" Dec 05 01:15:49 crc kubenswrapper[4665]: E1205 01:15:49.229176 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.229183 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 01:15:49 crc kubenswrapper[4665]: E1205 
01:15:49.229195 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2494dbd6-0990-4ec2-9338-f0ef366f13f5" containerName="registry-server" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.229204 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="2494dbd6-0990-4ec2-9338-f0ef366f13f5" containerName="registry-server" Dec 05 01:15:49 crc kubenswrapper[4665]: E1205 01:15:49.229216 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2494dbd6-0990-4ec2-9338-f0ef366f13f5" containerName="extract-content" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.229226 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="2494dbd6-0990-4ec2-9338-f0ef366f13f5" containerName="extract-content" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.229394 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.229405 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="2494dbd6-0990-4ec2-9338-f0ef366f13f5" containerName="registry-server" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.229823 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414955-2klss" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.231882 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.232367 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.242542 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414955-2klss"] Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.266775 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv"] Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.266976 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv" podUID="34ce612a-9777-4777-b6af-e98a53b3fb57" containerName="route-controller-manager" containerID="cri-o://01d30b84d42bea6eef7be7e78b35d9d7d8c78ec73300e2faee7a36e3addb5ab7" gracePeriod=30 Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.287761 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7twnf"] Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.287958 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf" podUID="f9f525e3-8aea-4e56-99ca-6a06cdafa897" containerName="controller-manager" containerID="cri-o://9711538003c40c9164d7b34b77dd2143a471c51a65c9892f9dbef80695f02b64" gracePeriod=30 Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.329797 4665 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-7twnf container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Dec 05 01:15:49 crc kubenswrapper[4665]: 
I1205 01:15:49.329831 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/29229102-10ab-474a-a236-0dd9bb1553cb-config-volume\") pod \"collect-profiles-29414955-2klss\" (UID: \"29229102-10ab-474a-a236-0dd9bb1553cb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414955-2klss" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.329842 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf" podUID="f9f525e3-8aea-4e56-99ca-6a06cdafa897" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.329868 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/29229102-10ab-474a-a236-0dd9bb1553cb-secret-volume\") pod \"collect-profiles-29414955-2klss\" (UID: \"29229102-10ab-474a-a236-0dd9bb1553cb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414955-2klss" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.329916 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2ls9x\" (UniqueName: \"kubernetes.io/projected/29229102-10ab-474a-a236-0dd9bb1553cb-kube-api-access-2ls9x\") pod \"collect-profiles-29414955-2klss\" (UID: \"29229102-10ab-474a-a236-0dd9bb1553cb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414955-2klss" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.431498 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2ls9x\" (UniqueName: \"kubernetes.io/projected/29229102-10ab-474a-a236-0dd9bb1553cb-kube-api-access-2ls9x\") pod \"collect-profiles-29414955-2klss\" (UID: \"29229102-10ab-474a-a236-0dd9bb1553cb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414955-2klss" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.431844 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/29229102-10ab-474a-a236-0dd9bb1553cb-config-volume\") pod \"collect-profiles-29414955-2klss\" (UID: \"29229102-10ab-474a-a236-0dd9bb1553cb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414955-2klss" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.431869 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/29229102-10ab-474a-a236-0dd9bb1553cb-secret-volume\") pod \"collect-profiles-29414955-2klss\" (UID: \"29229102-10ab-474a-a236-0dd9bb1553cb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414955-2klss" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.432567 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/29229102-10ab-474a-a236-0dd9bb1553cb-config-volume\") pod \"collect-profiles-29414955-2klss\" (UID: \"29229102-10ab-474a-a236-0dd9bb1553cb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414955-2klss" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.439239 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: 
\"kubernetes.io/secret/29229102-10ab-474a-a236-0dd9bb1553cb-secret-volume\") pod \"collect-profiles-29414955-2klss\" (UID: \"29229102-10ab-474a-a236-0dd9bb1553cb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414955-2klss" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.473131 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2ls9x\" (UniqueName: \"kubernetes.io/projected/29229102-10ab-474a-a236-0dd9bb1553cb-kube-api-access-2ls9x\") pod \"collect-profiles-29414955-2klss\" (UID: \"29229102-10ab-474a-a236-0dd9bb1553cb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414955-2klss" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.543032 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414955-2klss" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.719991 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.835812 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9f525e3-8aea-4e56-99ca-6a06cdafa897-config\") pod \"f9f525e3-8aea-4e56-99ca-6a06cdafa897\" (UID: \"f9f525e3-8aea-4e56-99ca-6a06cdafa897\") " Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.835879 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f9f525e3-8aea-4e56-99ca-6a06cdafa897-serving-cert\") pod \"f9f525e3-8aea-4e56-99ca-6a06cdafa897\" (UID: \"f9f525e3-8aea-4e56-99ca-6a06cdafa897\") " Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.835911 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f9f525e3-8aea-4e56-99ca-6a06cdafa897-proxy-ca-bundles\") pod \"f9f525e3-8aea-4e56-99ca-6a06cdafa897\" (UID: \"f9f525e3-8aea-4e56-99ca-6a06cdafa897\") " Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.835941 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f9f525e3-8aea-4e56-99ca-6a06cdafa897-client-ca\") pod \"f9f525e3-8aea-4e56-99ca-6a06cdafa897\" (UID: \"f9f525e3-8aea-4e56-99ca-6a06cdafa897\") " Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.835980 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xjcmx\" (UniqueName: \"kubernetes.io/projected/f9f525e3-8aea-4e56-99ca-6a06cdafa897-kube-api-access-xjcmx\") pod \"f9f525e3-8aea-4e56-99ca-6a06cdafa897\" (UID: \"f9f525e3-8aea-4e56-99ca-6a06cdafa897\") " Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.838523 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9f525e3-8aea-4e56-99ca-6a06cdafa897-config" (OuterVolumeSpecName: "config") pod "f9f525e3-8aea-4e56-99ca-6a06cdafa897" (UID: "f9f525e3-8aea-4e56-99ca-6a06cdafa897"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.838792 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9f525e3-8aea-4e56-99ca-6a06cdafa897-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "f9f525e3-8aea-4e56-99ca-6a06cdafa897" (UID: "f9f525e3-8aea-4e56-99ca-6a06cdafa897"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.838991 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9f525e3-8aea-4e56-99ca-6a06cdafa897-client-ca" (OuterVolumeSpecName: "client-ca") pod "f9f525e3-8aea-4e56-99ca-6a06cdafa897" (UID: "f9f525e3-8aea-4e56-99ca-6a06cdafa897"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.842175 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9f525e3-8aea-4e56-99ca-6a06cdafa897-kube-api-access-xjcmx" (OuterVolumeSpecName: "kube-api-access-xjcmx") pod "f9f525e3-8aea-4e56-99ca-6a06cdafa897" (UID: "f9f525e3-8aea-4e56-99ca-6a06cdafa897"). InnerVolumeSpecName "kube-api-access-xjcmx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.847095 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9f525e3-8aea-4e56-99ca-6a06cdafa897-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "f9f525e3-8aea-4e56-99ca-6a06cdafa897" (UID: "f9f525e3-8aea-4e56-99ca-6a06cdafa897"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.945907 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9f525e3-8aea-4e56-99ca-6a06cdafa897-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.946245 4665 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f9f525e3-8aea-4e56-99ca-6a06cdafa897-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.946260 4665 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f9f525e3-8aea-4e56-99ca-6a06cdafa897-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.946274 4665 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f9f525e3-8aea-4e56-99ca-6a06cdafa897-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.946286 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xjcmx\" (UniqueName: \"kubernetes.io/projected/f9f525e3-8aea-4e56-99ca-6a06cdafa897-kube-api-access-xjcmx\") on node \"crc\" DevicePath \"\"" Dec 05 01:15:49 crc kubenswrapper[4665]: I1205 01:15:49.967162 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv" Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.047780 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8s9lp\" (UniqueName: \"kubernetes.io/projected/34ce612a-9777-4777-b6af-e98a53b3fb57-kube-api-access-8s9lp\") pod \"34ce612a-9777-4777-b6af-e98a53b3fb57\" (UID: \"34ce612a-9777-4777-b6af-e98a53b3fb57\") " Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.047853 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34ce612a-9777-4777-b6af-e98a53b3fb57-config\") pod \"34ce612a-9777-4777-b6af-e98a53b3fb57\" (UID: \"34ce612a-9777-4777-b6af-e98a53b3fb57\") " Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.047897 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/34ce612a-9777-4777-b6af-e98a53b3fb57-client-ca\") pod \"34ce612a-9777-4777-b6af-e98a53b3fb57\" (UID: \"34ce612a-9777-4777-b6af-e98a53b3fb57\") " Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.047982 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34ce612a-9777-4777-b6af-e98a53b3fb57-serving-cert\") pod \"34ce612a-9777-4777-b6af-e98a53b3fb57\" (UID: \"34ce612a-9777-4777-b6af-e98a53b3fb57\") " Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.049647 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34ce612a-9777-4777-b6af-e98a53b3fb57-config" (OuterVolumeSpecName: "config") pod "34ce612a-9777-4777-b6af-e98a53b3fb57" (UID: "34ce612a-9777-4777-b6af-e98a53b3fb57"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.049986 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34ce612a-9777-4777-b6af-e98a53b3fb57-client-ca" (OuterVolumeSpecName: "client-ca") pod "34ce612a-9777-4777-b6af-e98a53b3fb57" (UID: "34ce612a-9777-4777-b6af-e98a53b3fb57"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.053533 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34ce612a-9777-4777-b6af-e98a53b3fb57-kube-api-access-8s9lp" (OuterVolumeSpecName: "kube-api-access-8s9lp") pod "34ce612a-9777-4777-b6af-e98a53b3fb57" (UID: "34ce612a-9777-4777-b6af-e98a53b3fb57"). InnerVolumeSpecName "kube-api-access-8s9lp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.053800 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34ce612a-9777-4777-b6af-e98a53b3fb57-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "34ce612a-9777-4777-b6af-e98a53b3fb57" (UID: "34ce612a-9777-4777-b6af-e98a53b3fb57"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.149757 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8s9lp\" (UniqueName: \"kubernetes.io/projected/34ce612a-9777-4777-b6af-e98a53b3fb57-kube-api-access-8s9lp\") on node \"crc\" DevicePath \"\"" Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.149793 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/34ce612a-9777-4777-b6af-e98a53b3fb57-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.149802 4665 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/34ce612a-9777-4777-b6af-e98a53b3fb57-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.149812 4665 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/34ce612a-9777-4777-b6af-e98a53b3fb57-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.291904 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414955-2klss"] Dec 05 01:15:50 crc kubenswrapper[4665]: W1205 01:15:50.297699 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod29229102_10ab_474a_a236_0dd9bb1553cb.slice/crio-df196b9342ae5d22a710ce4a9263b62b1920f6b93879eb2be88559781c3c7e17 WatchSource:0}: Error finding container df196b9342ae5d22a710ce4a9263b62b1920f6b93879eb2be88559781c3c7e17: Status 404 returned error can't find the container with id df196b9342ae5d22a710ce4a9263b62b1920f6b93879eb2be88559781c3c7e17 Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.368963 4665 generic.go:334] "Generic (PLEG): container finished" podID="34ce612a-9777-4777-b6af-e98a53b3fb57" containerID="01d30b84d42bea6eef7be7e78b35d9d7d8c78ec73300e2faee7a36e3addb5ab7" exitCode=0 Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.369007 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv" event={"ID":"34ce612a-9777-4777-b6af-e98a53b3fb57","Type":"ContainerDied","Data":"01d30b84d42bea6eef7be7e78b35d9d7d8c78ec73300e2faee7a36e3addb5ab7"} Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.368989 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv" Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.369043 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv" event={"ID":"34ce612a-9777-4777-b6af-e98a53b3fb57","Type":"ContainerDied","Data":"6d2e4d4cdb6448c5da780f8f4c9822c7ea47d4df3f0c489fbc5ca5f1fef2a695"} Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.369062 4665 scope.go:117] "RemoveContainer" containerID="01d30b84d42bea6eef7be7e78b35d9d7d8c78ec73300e2faee7a36e3addb5ab7" Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.370726 4665 generic.go:334] "Generic (PLEG): container finished" podID="f9f525e3-8aea-4e56-99ca-6a06cdafa897" containerID="9711538003c40c9164d7b34b77dd2143a471c51a65c9892f9dbef80695f02b64" exitCode=0 Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.370802 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf" event={"ID":"f9f525e3-8aea-4e56-99ca-6a06cdafa897","Type":"ContainerDied","Data":"9711538003c40c9164d7b34b77dd2143a471c51a65c9892f9dbef80695f02b64"} Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.370813 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf" Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.370832 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7twnf" event={"ID":"f9f525e3-8aea-4e56-99ca-6a06cdafa897","Type":"ContainerDied","Data":"af8c95e690d6fd0ee5d5c1529890d1f87386681387c783656b61b97c25c6658c"} Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.385970 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414955-2klss" event={"ID":"29229102-10ab-474a-a236-0dd9bb1553cb","Type":"ContainerStarted","Data":"df196b9342ae5d22a710ce4a9263b62b1920f6b93879eb2be88559781c3c7e17"} Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.414051 4665 scope.go:117] "RemoveContainer" containerID="01d30b84d42bea6eef7be7e78b35d9d7d8c78ec73300e2faee7a36e3addb5ab7" Dec 05 01:15:50 crc kubenswrapper[4665]: E1205 01:15:50.415080 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01d30b84d42bea6eef7be7e78b35d9d7d8c78ec73300e2faee7a36e3addb5ab7\": container with ID starting with 01d30b84d42bea6eef7be7e78b35d9d7d8c78ec73300e2faee7a36e3addb5ab7 not found: ID does not exist" containerID="01d30b84d42bea6eef7be7e78b35d9d7d8c78ec73300e2faee7a36e3addb5ab7" Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.415110 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01d30b84d42bea6eef7be7e78b35d9d7d8c78ec73300e2faee7a36e3addb5ab7"} err="failed to get container status \"01d30b84d42bea6eef7be7e78b35d9d7d8c78ec73300e2faee7a36e3addb5ab7\": rpc error: code = NotFound desc = could not find container \"01d30b84d42bea6eef7be7e78b35d9d7d8c78ec73300e2faee7a36e3addb5ab7\": container with ID starting with 01d30b84d42bea6eef7be7e78b35d9d7d8c78ec73300e2faee7a36e3addb5ab7 not found: ID does not exist" Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.415143 4665 scope.go:117] "RemoveContainer" containerID="9711538003c40c9164d7b34b77dd2143a471c51a65c9892f9dbef80695f02b64" Dec 05 
01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.428018 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7twnf"] Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.432520 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7twnf"] Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.435956 4665 scope.go:117] "RemoveContainer" containerID="9711538003c40c9164d7b34b77dd2143a471c51a65c9892f9dbef80695f02b64" Dec 05 01:15:50 crc kubenswrapper[4665]: E1205 01:15:50.437044 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9711538003c40c9164d7b34b77dd2143a471c51a65c9892f9dbef80695f02b64\": container with ID starting with 9711538003c40c9164d7b34b77dd2143a471c51a65c9892f9dbef80695f02b64 not found: ID does not exist" containerID="9711538003c40c9164d7b34b77dd2143a471c51a65c9892f9dbef80695f02b64" Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.437159 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9711538003c40c9164d7b34b77dd2143a471c51a65c9892f9dbef80695f02b64"} err="failed to get container status \"9711538003c40c9164d7b34b77dd2143a471c51a65c9892f9dbef80695f02b64\": rpc error: code = NotFound desc = could not find container \"9711538003c40c9164d7b34b77dd2143a471c51a65c9892f9dbef80695f02b64\": container with ID starting with 9711538003c40c9164d7b34b77dd2143a471c51a65c9892f9dbef80695f02b64 not found: ID does not exist" Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.437751 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv"] Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.442808 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv"] Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.768650 4665 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-t24dv container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.8:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.768722 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-t24dv" podUID="34ce612a-9777-4777-b6af-e98a53b3fb57" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.8:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.900369 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="34ce612a-9777-4777-b6af-e98a53b3fb57" path="/var/lib/kubelet/pods/34ce612a-9777-4777-b6af-e98a53b3fb57/volumes" Dec 05 01:15:50 crc kubenswrapper[4665]: I1205 01:15:50.901197 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9f525e3-8aea-4e56-99ca-6a06cdafa897" path="/var/lib/kubelet/pods/f9f525e3-8aea-4e56-99ca-6a06cdafa897/volumes" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.013022 4665 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd"] Dec 05 01:15:51 crc kubenswrapper[4665]: E1205 01:15:51.013314 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9f525e3-8aea-4e56-99ca-6a06cdafa897" containerName="controller-manager" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.013333 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9f525e3-8aea-4e56-99ca-6a06cdafa897" containerName="controller-manager" Dec 05 01:15:51 crc kubenswrapper[4665]: E1205 01:15:51.013358 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34ce612a-9777-4777-b6af-e98a53b3fb57" containerName="route-controller-manager" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.013366 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="34ce612a-9777-4777-b6af-e98a53b3fb57" containerName="route-controller-manager" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.013474 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9f525e3-8aea-4e56-99ca-6a06cdafa897" containerName="controller-manager" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.013495 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="34ce612a-9777-4777-b6af-e98a53b3fb57" containerName="route-controller-manager" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.013919 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.016350 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.018520 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.019762 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-85b9785f75-jnzc8"] Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.020272 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.020364 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.020452 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.021314 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.023975 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.024667 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.024969 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.025131 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.025181 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.026807 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.037603 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.040595 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.055578 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd"] Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.061452 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-proxy-ca-bundles\") pod \"controller-manager-85b9785f75-jnzc8\" (UID: \"2c2e7857-9f07-40f6-91dc-68f4f4ac9708\") " pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.061480 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-serving-cert\") pod \"controller-manager-85b9785f75-jnzc8\" (UID: \"2c2e7857-9f07-40f6-91dc-68f4f4ac9708\") " pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.061506 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-config\") pod \"controller-manager-85b9785f75-jnzc8\" (UID: \"2c2e7857-9f07-40f6-91dc-68f4f4ac9708\") " pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.061525 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74fee70b-9c1c-4010-9a79-c83a16d317b8-client-ca\") pod \"route-controller-manager-77457b7c4c-bsstd\" (UID: \"74fee70b-9c1c-4010-9a79-c83a16d317b8\") " pod="openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd" Dec 05 01:15:51 
crc kubenswrapper[4665]: I1205 01:15:51.061556 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jr8q9\" (UniqueName: \"kubernetes.io/projected/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-kube-api-access-jr8q9\") pod \"controller-manager-85b9785f75-jnzc8\" (UID: \"2c2e7857-9f07-40f6-91dc-68f4f4ac9708\") " pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.061592 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74fee70b-9c1c-4010-9a79-c83a16d317b8-config\") pod \"route-controller-manager-77457b7c4c-bsstd\" (UID: \"74fee70b-9c1c-4010-9a79-c83a16d317b8\") " pod="openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.061613 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-client-ca\") pod \"controller-manager-85b9785f75-jnzc8\" (UID: \"2c2e7857-9f07-40f6-91dc-68f4f4ac9708\") " pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.061627 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74fee70b-9c1c-4010-9a79-c83a16d317b8-serving-cert\") pod \"route-controller-manager-77457b7c4c-bsstd\" (UID: \"74fee70b-9c1c-4010-9a79-c83a16d317b8\") " pod="openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.061645 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjj9s\" (UniqueName: \"kubernetes.io/projected/74fee70b-9c1c-4010-9a79-c83a16d317b8-kube-api-access-sjj9s\") pod \"route-controller-manager-77457b7c4c-bsstd\" (UID: \"74fee70b-9c1c-4010-9a79-c83a16d317b8\") " pod="openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.072961 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-85b9785f75-jnzc8"] Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.162562 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74fee70b-9c1c-4010-9a79-c83a16d317b8-config\") pod \"route-controller-manager-77457b7c4c-bsstd\" (UID: \"74fee70b-9c1c-4010-9a79-c83a16d317b8\") " pod="openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.162623 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-client-ca\") pod \"controller-manager-85b9785f75-jnzc8\" (UID: \"2c2e7857-9f07-40f6-91dc-68f4f4ac9708\") " pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.162642 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74fee70b-9c1c-4010-9a79-c83a16d317b8-serving-cert\") pod 
\"route-controller-manager-77457b7c4c-bsstd\" (UID: \"74fee70b-9c1c-4010-9a79-c83a16d317b8\") " pod="openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.162667 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjj9s\" (UniqueName: \"kubernetes.io/projected/74fee70b-9c1c-4010-9a79-c83a16d317b8-kube-api-access-sjj9s\") pod \"route-controller-manager-77457b7c4c-bsstd\" (UID: \"74fee70b-9c1c-4010-9a79-c83a16d317b8\") " pod="openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.162706 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-proxy-ca-bundles\") pod \"controller-manager-85b9785f75-jnzc8\" (UID: \"2c2e7857-9f07-40f6-91dc-68f4f4ac9708\") " pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.162721 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-serving-cert\") pod \"controller-manager-85b9785f75-jnzc8\" (UID: \"2c2e7857-9f07-40f6-91dc-68f4f4ac9708\") " pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.162741 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-config\") pod \"controller-manager-85b9785f75-jnzc8\" (UID: \"2c2e7857-9f07-40f6-91dc-68f4f4ac9708\") " pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.162759 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74fee70b-9c1c-4010-9a79-c83a16d317b8-client-ca\") pod \"route-controller-manager-77457b7c4c-bsstd\" (UID: \"74fee70b-9c1c-4010-9a79-c83a16d317b8\") " pod="openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.162783 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jr8q9\" (UniqueName: \"kubernetes.io/projected/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-kube-api-access-jr8q9\") pod \"controller-manager-85b9785f75-jnzc8\" (UID: \"2c2e7857-9f07-40f6-91dc-68f4f4ac9708\") " pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.163575 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-client-ca\") pod \"controller-manager-85b9785f75-jnzc8\" (UID: \"2c2e7857-9f07-40f6-91dc-68f4f4ac9708\") " pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.163788 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74fee70b-9c1c-4010-9a79-c83a16d317b8-config\") pod \"route-controller-manager-77457b7c4c-bsstd\" (UID: \"74fee70b-9c1c-4010-9a79-c83a16d317b8\") " 
pod="openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.164631 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-proxy-ca-bundles\") pod \"controller-manager-85b9785f75-jnzc8\" (UID: \"2c2e7857-9f07-40f6-91dc-68f4f4ac9708\") " pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.164769 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74fee70b-9c1c-4010-9a79-c83a16d317b8-client-ca\") pod \"route-controller-manager-77457b7c4c-bsstd\" (UID: \"74fee70b-9c1c-4010-9a79-c83a16d317b8\") " pod="openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.165909 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-config\") pod \"controller-manager-85b9785f75-jnzc8\" (UID: \"2c2e7857-9f07-40f6-91dc-68f4f4ac9708\") " pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.168654 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-serving-cert\") pod \"controller-manager-85b9785f75-jnzc8\" (UID: \"2c2e7857-9f07-40f6-91dc-68f4f4ac9708\") " pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.170854 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74fee70b-9c1c-4010-9a79-c83a16d317b8-serving-cert\") pod \"route-controller-manager-77457b7c4c-bsstd\" (UID: \"74fee70b-9c1c-4010-9a79-c83a16d317b8\") " pod="openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.181062 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jr8q9\" (UniqueName: \"kubernetes.io/projected/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-kube-api-access-jr8q9\") pod \"controller-manager-85b9785f75-jnzc8\" (UID: \"2c2e7857-9f07-40f6-91dc-68f4f4ac9708\") " pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.183259 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjj9s\" (UniqueName: \"kubernetes.io/projected/74fee70b-9c1c-4010-9a79-c83a16d317b8-kube-api-access-sjj9s\") pod \"route-controller-manager-77457b7c4c-bsstd\" (UID: \"74fee70b-9c1c-4010-9a79-c83a16d317b8\") " pod="openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.306906 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd"] Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.307756 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.355276 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.395712 4665 generic.go:334] "Generic (PLEG): container finished" podID="29229102-10ab-474a-a236-0dd9bb1553cb" containerID="19ae4d42c17f241db84dd7f9a015c0e4a98dbaa2fb1f50e8c110ccf931277448" exitCode=0 Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.395752 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414955-2klss" event={"ID":"29229102-10ab-474a-a236-0dd9bb1553cb","Type":"ContainerDied","Data":"19ae4d42c17f241db84dd7f9a015c0e4a98dbaa2fb1f50e8c110ccf931277448"} Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.633872 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-85b9785f75-jnzc8"] Dec 05 01:15:51 crc kubenswrapper[4665]: I1205 01:15:51.760970 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd"] Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.402348 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd" event={"ID":"74fee70b-9c1c-4010-9a79-c83a16d317b8","Type":"ContainerStarted","Data":"2d66a33929d31f2ec3fb179c7d2dc13cef9e5cd08626db7d486b51e707278001"} Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.402656 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd" podUID="74fee70b-9c1c-4010-9a79-c83a16d317b8" containerName="route-controller-manager" containerID="cri-o://2d66a33929d31f2ec3fb179c7d2dc13cef9e5cd08626db7d486b51e707278001" gracePeriod=30 Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.402672 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd" event={"ID":"74fee70b-9c1c-4010-9a79-c83a16d317b8","Type":"ContainerStarted","Data":"b50ead5522c23d27d5aefa1a8bf1b9ed2d9c5729245074f51f1c65791bc0b026"} Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.403127 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd" Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.404736 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" event={"ID":"2c2e7857-9f07-40f6-91dc-68f4f4ac9708","Type":"ContainerStarted","Data":"520d16c7d05384911887a352162b5c08f4adfdc0fedd39296533897d473e3ef3"} Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.404811 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" event={"ID":"2c2e7857-9f07-40f6-91dc-68f4f4ac9708","Type":"ContainerStarted","Data":"677413dfd3c10f4f918000f635ecde416c70042d9c5f4e3fd346b72f711c3e66"} Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.430657 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd" 
podStartSLOduration=3.430639861 podStartE2EDuration="3.430639861s" podCreationTimestamp="2025-12-05 01:15:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:15:52.428763284 +0000 UTC m=+327.768155583" watchObservedRunningTime="2025-12-05 01:15:52.430639861 +0000 UTC m=+327.770032160" Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.432497 4665 patch_prober.go:28] interesting pod/route-controller-manager-77457b7c4c-bsstd container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.58:8443/healthz\": read tcp 10.217.0.2:36160->10.217.0.58:8443: read: connection reset by peer" start-of-body= Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.432541 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd" podUID="74fee70b-9c1c-4010-9a79-c83a16d317b8" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.58:8443/healthz\": read tcp 10.217.0.2:36160->10.217.0.58:8443: read: connection reset by peer" Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.456642 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" podStartSLOduration=3.456625465 podStartE2EDuration="3.456625465s" podCreationTimestamp="2025-12-05 01:15:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:15:52.453336601 +0000 UTC m=+327.792728900" watchObservedRunningTime="2025-12-05 01:15:52.456625465 +0000 UTC m=+327.796017764" Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.745112 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414955-2klss" Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.792424 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-77457b7c4c-bsstd_74fee70b-9c1c-4010-9a79-c83a16d317b8/route-controller-manager/0.log" Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.792501 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd" Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.893136 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74fee70b-9c1c-4010-9a79-c83a16d317b8-serving-cert\") pod \"74fee70b-9c1c-4010-9a79-c83a16d317b8\" (UID: \"74fee70b-9c1c-4010-9a79-c83a16d317b8\") " Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.893255 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/29229102-10ab-474a-a236-0dd9bb1553cb-config-volume\") pod \"29229102-10ab-474a-a236-0dd9bb1553cb\" (UID: \"29229102-10ab-474a-a236-0dd9bb1553cb\") " Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.893282 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2ls9x\" (UniqueName: \"kubernetes.io/projected/29229102-10ab-474a-a236-0dd9bb1553cb-kube-api-access-2ls9x\") pod \"29229102-10ab-474a-a236-0dd9bb1553cb\" (UID: \"29229102-10ab-474a-a236-0dd9bb1553cb\") " Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.894577 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/29229102-10ab-474a-a236-0dd9bb1553cb-secret-volume\") pod \"29229102-10ab-474a-a236-0dd9bb1553cb\" (UID: \"29229102-10ab-474a-a236-0dd9bb1553cb\") " Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.894601 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74fee70b-9c1c-4010-9a79-c83a16d317b8-client-ca\") pod \"74fee70b-9c1c-4010-9a79-c83a16d317b8\" (UID: \"74fee70b-9c1c-4010-9a79-c83a16d317b8\") " Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.894632 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74fee70b-9c1c-4010-9a79-c83a16d317b8-config\") pod \"74fee70b-9c1c-4010-9a79-c83a16d317b8\" (UID: \"74fee70b-9c1c-4010-9a79-c83a16d317b8\") " Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.894650 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjj9s\" (UniqueName: \"kubernetes.io/projected/74fee70b-9c1c-4010-9a79-c83a16d317b8-kube-api-access-sjj9s\") pod \"74fee70b-9c1c-4010-9a79-c83a16d317b8\" (UID: \"74fee70b-9c1c-4010-9a79-c83a16d317b8\") " Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.894074 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/29229102-10ab-474a-a236-0dd9bb1553cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "29229102-10ab-474a-a236-0dd9bb1553cb" (UID: "29229102-10ab-474a-a236-0dd9bb1553cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.895165 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74fee70b-9c1c-4010-9a79-c83a16d317b8-client-ca" (OuterVolumeSpecName: "client-ca") pod "74fee70b-9c1c-4010-9a79-c83a16d317b8" (UID: "74fee70b-9c1c-4010-9a79-c83a16d317b8"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.895537 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74fee70b-9c1c-4010-9a79-c83a16d317b8-config" (OuterVolumeSpecName: "config") pod "74fee70b-9c1c-4010-9a79-c83a16d317b8" (UID: "74fee70b-9c1c-4010-9a79-c83a16d317b8"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.898603 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74fee70b-9c1c-4010-9a79-c83a16d317b8-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "74fee70b-9c1c-4010-9a79-c83a16d317b8" (UID: "74fee70b-9c1c-4010-9a79-c83a16d317b8"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.898907 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/29229102-10ab-474a-a236-0dd9bb1553cb-kube-api-access-2ls9x" (OuterVolumeSpecName: "kube-api-access-2ls9x") pod "29229102-10ab-474a-a236-0dd9bb1553cb" (UID: "29229102-10ab-474a-a236-0dd9bb1553cb"). InnerVolumeSpecName "kube-api-access-2ls9x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.898946 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74fee70b-9c1c-4010-9a79-c83a16d317b8-kube-api-access-sjj9s" (OuterVolumeSpecName: "kube-api-access-sjj9s") pod "74fee70b-9c1c-4010-9a79-c83a16d317b8" (UID: "74fee70b-9c1c-4010-9a79-c83a16d317b8"). InnerVolumeSpecName "kube-api-access-sjj9s". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.899689 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/29229102-10ab-474a-a236-0dd9bb1553cb-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "29229102-10ab-474a-a236-0dd9bb1553cb" (UID: "29229102-10ab-474a-a236-0dd9bb1553cb"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.995371 4665 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/74fee70b-9c1c-4010-9a79-c83a16d317b8-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.995408 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/74fee70b-9c1c-4010-9a79-c83a16d317b8-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.995420 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjj9s\" (UniqueName: \"kubernetes.io/projected/74fee70b-9c1c-4010-9a79-c83a16d317b8-kube-api-access-sjj9s\") on node \"crc\" DevicePath \"\"" Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.995432 4665 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/74fee70b-9c1c-4010-9a79-c83a16d317b8-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.995443 4665 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/29229102-10ab-474a-a236-0dd9bb1553cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.995453 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2ls9x\" (UniqueName: \"kubernetes.io/projected/29229102-10ab-474a-a236-0dd9bb1553cb-kube-api-access-2ls9x\") on node \"crc\" DevicePath \"\"" Dec 05 01:15:52 crc kubenswrapper[4665]: I1205 01:15:52.995463 4665 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/29229102-10ab-474a-a236-0dd9bb1553cb-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 01:15:53 crc kubenswrapper[4665]: I1205 01:15:53.411195 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-77457b7c4c-bsstd_74fee70b-9c1c-4010-9a79-c83a16d317b8/route-controller-manager/0.log" Dec 05 01:15:53 crc kubenswrapper[4665]: I1205 01:15:53.411264 4665 generic.go:334] "Generic (PLEG): container finished" podID="74fee70b-9c1c-4010-9a79-c83a16d317b8" containerID="2d66a33929d31f2ec3fb179c7d2dc13cef9e5cd08626db7d486b51e707278001" exitCode=255 Dec 05 01:15:53 crc kubenswrapper[4665]: I1205 01:15:53.411437 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd" Dec 05 01:15:53 crc kubenswrapper[4665]: I1205 01:15:53.412120 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd" event={"ID":"74fee70b-9c1c-4010-9a79-c83a16d317b8","Type":"ContainerDied","Data":"2d66a33929d31f2ec3fb179c7d2dc13cef9e5cd08626db7d486b51e707278001"} Dec 05 01:15:53 crc kubenswrapper[4665]: I1205 01:15:53.412153 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd" event={"ID":"74fee70b-9c1c-4010-9a79-c83a16d317b8","Type":"ContainerDied","Data":"b50ead5522c23d27d5aefa1a8bf1b9ed2d9c5729245074f51f1c65791bc0b026"} Dec 05 01:15:53 crc kubenswrapper[4665]: I1205 01:15:53.412178 4665 scope.go:117] "RemoveContainer" containerID="2d66a33929d31f2ec3fb179c7d2dc13cef9e5cd08626db7d486b51e707278001" Dec 05 01:15:53 crc kubenswrapper[4665]: I1205 01:15:53.417722 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414955-2klss" Dec 05 01:15:53 crc kubenswrapper[4665]: I1205 01:15:53.417812 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414955-2klss" event={"ID":"29229102-10ab-474a-a236-0dd9bb1553cb","Type":"ContainerDied","Data":"df196b9342ae5d22a710ce4a9263b62b1920f6b93879eb2be88559781c3c7e17"} Dec 05 01:15:53 crc kubenswrapper[4665]: I1205 01:15:53.417905 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="df196b9342ae5d22a710ce4a9263b62b1920f6b93879eb2be88559781c3c7e17" Dec 05 01:15:53 crc kubenswrapper[4665]: I1205 01:15:53.419533 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" Dec 05 01:15:53 crc kubenswrapper[4665]: I1205 01:15:53.425612 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" Dec 05 01:15:53 crc kubenswrapper[4665]: I1205 01:15:53.433888 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd"] Dec 05 01:15:53 crc kubenswrapper[4665]: I1205 01:15:53.437422 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-77457b7c4c-bsstd"] Dec 05 01:15:53 crc kubenswrapper[4665]: I1205 01:15:53.442366 4665 scope.go:117] "RemoveContainer" containerID="2d66a33929d31f2ec3fb179c7d2dc13cef9e5cd08626db7d486b51e707278001" Dec 05 01:15:53 crc kubenswrapper[4665]: E1205 01:15:53.443289 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d66a33929d31f2ec3fb179c7d2dc13cef9e5cd08626db7d486b51e707278001\": container with ID starting with 2d66a33929d31f2ec3fb179c7d2dc13cef9e5cd08626db7d486b51e707278001 not found: ID does not exist" containerID="2d66a33929d31f2ec3fb179c7d2dc13cef9e5cd08626db7d486b51e707278001" Dec 05 01:15:53 crc kubenswrapper[4665]: I1205 01:15:53.443372 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d66a33929d31f2ec3fb179c7d2dc13cef9e5cd08626db7d486b51e707278001"} err="failed to get container status 
\"2d66a33929d31f2ec3fb179c7d2dc13cef9e5cd08626db7d486b51e707278001\": rpc error: code = NotFound desc = could not find container \"2d66a33929d31f2ec3fb179c7d2dc13cef9e5cd08626db7d486b51e707278001\": container with ID starting with 2d66a33929d31f2ec3fb179c7d2dc13cef9e5cd08626db7d486b51e707278001 not found: ID does not exist" Dec 05 01:15:54 crc kubenswrapper[4665]: I1205 01:15:54.899706 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74fee70b-9c1c-4010-9a79-c83a16d317b8" path="/var/lib/kubelet/pods/74fee70b-9c1c-4010-9a79-c83a16d317b8/volumes" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.018602 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64894dc75c-cpw5q"] Dec 05 01:15:55 crc kubenswrapper[4665]: E1205 01:15:55.018830 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74fee70b-9c1c-4010-9a79-c83a16d317b8" containerName="route-controller-manager" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.018846 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="74fee70b-9c1c-4010-9a79-c83a16d317b8" containerName="route-controller-manager" Dec 05 01:15:55 crc kubenswrapper[4665]: E1205 01:15:55.018871 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="29229102-10ab-474a-a236-0dd9bb1553cb" containerName="collect-profiles" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.018878 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="29229102-10ab-474a-a236-0dd9bb1553cb" containerName="collect-profiles" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.018988 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="29229102-10ab-474a-a236-0dd9bb1553cb" containerName="collect-profiles" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.019011 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="74fee70b-9c1c-4010-9a79-c83a16d317b8" containerName="route-controller-manager" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.019468 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-64894dc75c-cpw5q" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.021845 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.022006 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.022041 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.022263 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.022268 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.028254 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.031248 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e30825b1-8469-46a7-9708-a53e3f1596d7-serving-cert\") pod \"route-controller-manager-64894dc75c-cpw5q\" (UID: \"e30825b1-8469-46a7-9708-a53e3f1596d7\") " pod="openshift-route-controller-manager/route-controller-manager-64894dc75c-cpw5q" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.031350 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e30825b1-8469-46a7-9708-a53e3f1596d7-config\") pod \"route-controller-manager-64894dc75c-cpw5q\" (UID: \"e30825b1-8469-46a7-9708-a53e3f1596d7\") " pod="openshift-route-controller-manager/route-controller-manager-64894dc75c-cpw5q" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.031381 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zz8j\" (UniqueName: \"kubernetes.io/projected/e30825b1-8469-46a7-9708-a53e3f1596d7-kube-api-access-4zz8j\") pod \"route-controller-manager-64894dc75c-cpw5q\" (UID: \"e30825b1-8469-46a7-9708-a53e3f1596d7\") " pod="openshift-route-controller-manager/route-controller-manager-64894dc75c-cpw5q" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.031409 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e30825b1-8469-46a7-9708-a53e3f1596d7-client-ca\") pod \"route-controller-manager-64894dc75c-cpw5q\" (UID: \"e30825b1-8469-46a7-9708-a53e3f1596d7\") " pod="openshift-route-controller-manager/route-controller-manager-64894dc75c-cpw5q" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.038468 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64894dc75c-cpw5q"] Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.132568 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e30825b1-8469-46a7-9708-a53e3f1596d7-serving-cert\") pod 
\"route-controller-manager-64894dc75c-cpw5q\" (UID: \"e30825b1-8469-46a7-9708-a53e3f1596d7\") " pod="openshift-route-controller-manager/route-controller-manager-64894dc75c-cpw5q" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.132892 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e30825b1-8469-46a7-9708-a53e3f1596d7-config\") pod \"route-controller-manager-64894dc75c-cpw5q\" (UID: \"e30825b1-8469-46a7-9708-a53e3f1596d7\") " pod="openshift-route-controller-manager/route-controller-manager-64894dc75c-cpw5q" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.132917 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zz8j\" (UniqueName: \"kubernetes.io/projected/e30825b1-8469-46a7-9708-a53e3f1596d7-kube-api-access-4zz8j\") pod \"route-controller-manager-64894dc75c-cpw5q\" (UID: \"e30825b1-8469-46a7-9708-a53e3f1596d7\") " pod="openshift-route-controller-manager/route-controller-manager-64894dc75c-cpw5q" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.132947 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e30825b1-8469-46a7-9708-a53e3f1596d7-client-ca\") pod \"route-controller-manager-64894dc75c-cpw5q\" (UID: \"e30825b1-8469-46a7-9708-a53e3f1596d7\") " pod="openshift-route-controller-manager/route-controller-manager-64894dc75c-cpw5q" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.133861 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e30825b1-8469-46a7-9708-a53e3f1596d7-client-ca\") pod \"route-controller-manager-64894dc75c-cpw5q\" (UID: \"e30825b1-8469-46a7-9708-a53e3f1596d7\") " pod="openshift-route-controller-manager/route-controller-manager-64894dc75c-cpw5q" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.134187 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e30825b1-8469-46a7-9708-a53e3f1596d7-config\") pod \"route-controller-manager-64894dc75c-cpw5q\" (UID: \"e30825b1-8469-46a7-9708-a53e3f1596d7\") " pod="openshift-route-controller-manager/route-controller-manager-64894dc75c-cpw5q" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.148390 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e30825b1-8469-46a7-9708-a53e3f1596d7-serving-cert\") pod \"route-controller-manager-64894dc75c-cpw5q\" (UID: \"e30825b1-8469-46a7-9708-a53e3f1596d7\") " pod="openshift-route-controller-manager/route-controller-manager-64894dc75c-cpw5q" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.150733 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zz8j\" (UniqueName: \"kubernetes.io/projected/e30825b1-8469-46a7-9708-a53e3f1596d7-kube-api-access-4zz8j\") pod \"route-controller-manager-64894dc75c-cpw5q\" (UID: \"e30825b1-8469-46a7-9708-a53e3f1596d7\") " pod="openshift-route-controller-manager/route-controller-manager-64894dc75c-cpw5q" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.340219 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-64894dc75c-cpw5q" Dec 05 01:15:55 crc kubenswrapper[4665]: I1205 01:15:55.743589 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-64894dc75c-cpw5q"] Dec 05 01:15:55 crc kubenswrapper[4665]: W1205 01:15:55.749889 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode30825b1_8469_46a7_9708_a53e3f1596d7.slice/crio-a75a7b41f0df8c30a85f9a493a7f0037b348b1d360891fe6d289bc77ae853383 WatchSource:0}: Error finding container a75a7b41f0df8c30a85f9a493a7f0037b348b1d360891fe6d289bc77ae853383: Status 404 returned error can't find the container with id a75a7b41f0df8c30a85f9a493a7f0037b348b1d360891fe6d289bc77ae853383 Dec 05 01:15:56 crc kubenswrapper[4665]: I1205 01:15:56.435746 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-64894dc75c-cpw5q" event={"ID":"e30825b1-8469-46a7-9708-a53e3f1596d7","Type":"ContainerStarted","Data":"c0a418b953dedc67af9c07e44bea9dcb56294d82f7d74b2be238cc13ae012569"} Dec 05 01:15:56 crc kubenswrapper[4665]: I1205 01:15:56.436027 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-64894dc75c-cpw5q" event={"ID":"e30825b1-8469-46a7-9708-a53e3f1596d7","Type":"ContainerStarted","Data":"a75a7b41f0df8c30a85f9a493a7f0037b348b1d360891fe6d289bc77ae853383"} Dec 05 01:15:56 crc kubenswrapper[4665]: I1205 01:15:56.437163 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-64894dc75c-cpw5q" Dec 05 01:15:56 crc kubenswrapper[4665]: I1205 01:15:56.459864 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-64894dc75c-cpw5q" Dec 05 01:15:56 crc kubenswrapper[4665]: I1205 01:15:56.461756 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-64894dc75c-cpw5q" podStartSLOduration=5.461736384 podStartE2EDuration="5.461736384s" podCreationTimestamp="2025-12-05 01:15:51 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:15:56.4575208 +0000 UTC m=+331.796913099" watchObservedRunningTime="2025-12-05 01:15:56.461736384 +0000 UTC m=+331.801128683" Dec 05 01:16:11 crc kubenswrapper[4665]: I1205 01:16:11.336116 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-85b9785f75-jnzc8"] Dec 05 01:16:11 crc kubenswrapper[4665]: I1205 01:16:11.338081 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" podUID="2c2e7857-9f07-40f6-91dc-68f4f4ac9708" containerName="controller-manager" containerID="cri-o://520d16c7d05384911887a352162b5c08f4adfdc0fedd39296533897d473e3ef3" gracePeriod=30 Dec 05 01:16:11 crc kubenswrapper[4665]: I1205 01:16:11.368469 4665 patch_prober.go:28] interesting pod/controller-manager-85b9785f75-jnzc8 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.60:8443/healthz\": read tcp 10.217.0.2:56224->10.217.0.60:8443: read: connection reset by peer" 
start-of-body= Dec 05 01:16:11 crc kubenswrapper[4665]: I1205 01:16:11.368798 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" podUID="2c2e7857-9f07-40f6-91dc-68f4f4ac9708" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.60:8443/healthz\": read tcp 10.217.0.2:56224->10.217.0.60:8443: read: connection reset by peer" Dec 05 01:16:11 crc kubenswrapper[4665]: I1205 01:16:11.511594 4665 generic.go:334] "Generic (PLEG): container finished" podID="2c2e7857-9f07-40f6-91dc-68f4f4ac9708" containerID="520d16c7d05384911887a352162b5c08f4adfdc0fedd39296533897d473e3ef3" exitCode=0 Dec 05 01:16:11 crc kubenswrapper[4665]: I1205 01:16:11.511721 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" event={"ID":"2c2e7857-9f07-40f6-91dc-68f4f4ac9708","Type":"ContainerDied","Data":"520d16c7d05384911887a352162b5c08f4adfdc0fedd39296533897d473e3ef3"} Dec 05 01:16:11 crc kubenswrapper[4665]: I1205 01:16:11.946362 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" Dec 05 01:16:12 crc kubenswrapper[4665]: I1205 01:16:12.044618 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-client-ca\") pod \"2c2e7857-9f07-40f6-91dc-68f4f4ac9708\" (UID: \"2c2e7857-9f07-40f6-91dc-68f4f4ac9708\") " Dec 05 01:16:12 crc kubenswrapper[4665]: I1205 01:16:12.044728 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-config\") pod \"2c2e7857-9f07-40f6-91dc-68f4f4ac9708\" (UID: \"2c2e7857-9f07-40f6-91dc-68f4f4ac9708\") " Dec 05 01:16:12 crc kubenswrapper[4665]: I1205 01:16:12.044751 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-proxy-ca-bundles\") pod \"2c2e7857-9f07-40f6-91dc-68f4f4ac9708\" (UID: \"2c2e7857-9f07-40f6-91dc-68f4f4ac9708\") " Dec 05 01:16:12 crc kubenswrapper[4665]: I1205 01:16:12.044774 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-serving-cert\") pod \"2c2e7857-9f07-40f6-91dc-68f4f4ac9708\" (UID: \"2c2e7857-9f07-40f6-91dc-68f4f4ac9708\") " Dec 05 01:16:12 crc kubenswrapper[4665]: I1205 01:16:12.044807 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jr8q9\" (UniqueName: \"kubernetes.io/projected/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-kube-api-access-jr8q9\") pod \"2c2e7857-9f07-40f6-91dc-68f4f4ac9708\" (UID: \"2c2e7857-9f07-40f6-91dc-68f4f4ac9708\") " Dec 05 01:16:12 crc kubenswrapper[4665]: I1205 01:16:12.045477 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-client-ca" (OuterVolumeSpecName: "client-ca") pod "2c2e7857-9f07-40f6-91dc-68f4f4ac9708" (UID: "2c2e7857-9f07-40f6-91dc-68f4f4ac9708"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:16:12 crc kubenswrapper[4665]: I1205 01:16:12.045516 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-config" (OuterVolumeSpecName: "config") pod "2c2e7857-9f07-40f6-91dc-68f4f4ac9708" (UID: "2c2e7857-9f07-40f6-91dc-68f4f4ac9708"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:16:12 crc kubenswrapper[4665]: I1205 01:16:12.046446 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "2c2e7857-9f07-40f6-91dc-68f4f4ac9708" (UID: "2c2e7857-9f07-40f6-91dc-68f4f4ac9708"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:16:12 crc kubenswrapper[4665]: I1205 01:16:12.049857 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "2c2e7857-9f07-40f6-91dc-68f4f4ac9708" (UID: "2c2e7857-9f07-40f6-91dc-68f4f4ac9708"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:16:12 crc kubenswrapper[4665]: I1205 01:16:12.052454 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-kube-api-access-jr8q9" (OuterVolumeSpecName: "kube-api-access-jr8q9") pod "2c2e7857-9f07-40f6-91dc-68f4f4ac9708" (UID: "2c2e7857-9f07-40f6-91dc-68f4f4ac9708"). InnerVolumeSpecName "kube-api-access-jr8q9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:16:12 crc kubenswrapper[4665]: I1205 01:16:12.146410 4665 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 01:16:12 crc kubenswrapper[4665]: I1205 01:16:12.146448 4665 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 01:16:12 crc kubenswrapper[4665]: I1205 01:16:12.146458 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:16:12 crc kubenswrapper[4665]: I1205 01:16:12.146467 4665 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:16:12 crc kubenswrapper[4665]: I1205 01:16:12.146476 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jr8q9\" (UniqueName: \"kubernetes.io/projected/2c2e7857-9f07-40f6-91dc-68f4f4ac9708-kube-api-access-jr8q9\") on node \"crc\" DevicePath \"\"" Dec 05 01:16:12 crc kubenswrapper[4665]: I1205 01:16:12.521203 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" event={"ID":"2c2e7857-9f07-40f6-91dc-68f4f4ac9708","Type":"ContainerDied","Data":"677413dfd3c10f4f918000f635ecde416c70042d9c5f4e3fd346b72f711c3e66"} Dec 05 01:16:12 crc kubenswrapper[4665]: I1205 01:16:12.521260 4665 
scope.go:117] "RemoveContainer" containerID="520d16c7d05384911887a352162b5c08f4adfdc0fedd39296533897d473e3ef3" Dec 05 01:16:12 crc kubenswrapper[4665]: I1205 01:16:12.521365 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-85b9785f75-jnzc8" Dec 05 01:16:12 crc kubenswrapper[4665]: I1205 01:16:12.573092 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-85b9785f75-jnzc8"] Dec 05 01:16:12 crc kubenswrapper[4665]: I1205 01:16:12.582901 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-85b9785f75-jnzc8"] Dec 05 01:16:12 crc kubenswrapper[4665]: I1205 01:16:12.901371 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c2e7857-9f07-40f6-91dc-68f4f4ac9708" path="/var/lib/kubelet/pods/2c2e7857-9f07-40f6-91dc-68f4f4ac9708/volumes" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.032679 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-7688687f95-2slfb"] Dec 05 01:16:13 crc kubenswrapper[4665]: E1205 01:16:13.032907 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c2e7857-9f07-40f6-91dc-68f4f4ac9708" containerName="controller-manager" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.032922 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c2e7857-9f07-40f6-91dc-68f4f4ac9708" containerName="controller-manager" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.033055 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c2e7857-9f07-40f6-91dc-68f4f4ac9708" containerName="controller-manager" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.033600 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-7688687f95-2slfb" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.035697 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.036331 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.036513 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.037000 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.037806 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.048047 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7688687f95-2slfb"] Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.049029 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.050466 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.157855 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/818641a2-e0cf-406d-84b2-6a04ba68db5d-serving-cert\") pod \"controller-manager-7688687f95-2slfb\" (UID: \"818641a2-e0cf-406d-84b2-6a04ba68db5d\") " pod="openshift-controller-manager/controller-manager-7688687f95-2slfb" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.157921 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/818641a2-e0cf-406d-84b2-6a04ba68db5d-config\") pod \"controller-manager-7688687f95-2slfb\" (UID: \"818641a2-e0cf-406d-84b2-6a04ba68db5d\") " pod="openshift-controller-manager/controller-manager-7688687f95-2slfb" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.157974 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/818641a2-e0cf-406d-84b2-6a04ba68db5d-client-ca\") pod \"controller-manager-7688687f95-2slfb\" (UID: \"818641a2-e0cf-406d-84b2-6a04ba68db5d\") " pod="openshift-controller-manager/controller-manager-7688687f95-2slfb" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.158000 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fh2kg\" (UniqueName: \"kubernetes.io/projected/818641a2-e0cf-406d-84b2-6a04ba68db5d-kube-api-access-fh2kg\") pod \"controller-manager-7688687f95-2slfb\" (UID: \"818641a2-e0cf-406d-84b2-6a04ba68db5d\") " pod="openshift-controller-manager/controller-manager-7688687f95-2slfb" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.158025 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/818641a2-e0cf-406d-84b2-6a04ba68db5d-proxy-ca-bundles\") pod \"controller-manager-7688687f95-2slfb\" (UID: \"818641a2-e0cf-406d-84b2-6a04ba68db5d\") " pod="openshift-controller-manager/controller-manager-7688687f95-2slfb" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.258965 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/818641a2-e0cf-406d-84b2-6a04ba68db5d-config\") pod \"controller-manager-7688687f95-2slfb\" (UID: \"818641a2-e0cf-406d-84b2-6a04ba68db5d\") " pod="openshift-controller-manager/controller-manager-7688687f95-2slfb" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.259385 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/818641a2-e0cf-406d-84b2-6a04ba68db5d-client-ca\") pod \"controller-manager-7688687f95-2slfb\" (UID: \"818641a2-e0cf-406d-84b2-6a04ba68db5d\") " pod="openshift-controller-manager/controller-manager-7688687f95-2slfb" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.259522 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fh2kg\" (UniqueName: \"kubernetes.io/projected/818641a2-e0cf-406d-84b2-6a04ba68db5d-kube-api-access-fh2kg\") pod \"controller-manager-7688687f95-2slfb\" (UID: \"818641a2-e0cf-406d-84b2-6a04ba68db5d\") " pod="openshift-controller-manager/controller-manager-7688687f95-2slfb" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.259653 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/818641a2-e0cf-406d-84b2-6a04ba68db5d-proxy-ca-bundles\") pod \"controller-manager-7688687f95-2slfb\" (UID: \"818641a2-e0cf-406d-84b2-6a04ba68db5d\") " pod="openshift-controller-manager/controller-manager-7688687f95-2slfb" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.259819 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/818641a2-e0cf-406d-84b2-6a04ba68db5d-serving-cert\") pod \"controller-manager-7688687f95-2slfb\" (UID: \"818641a2-e0cf-406d-84b2-6a04ba68db5d\") " pod="openshift-controller-manager/controller-manager-7688687f95-2slfb" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.260844 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/818641a2-e0cf-406d-84b2-6a04ba68db5d-client-ca\") pod \"controller-manager-7688687f95-2slfb\" (UID: \"818641a2-e0cf-406d-84b2-6a04ba68db5d\") " pod="openshift-controller-manager/controller-manager-7688687f95-2slfb" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.261250 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/818641a2-e0cf-406d-84b2-6a04ba68db5d-proxy-ca-bundles\") pod \"controller-manager-7688687f95-2slfb\" (UID: \"818641a2-e0cf-406d-84b2-6a04ba68db5d\") " pod="openshift-controller-manager/controller-manager-7688687f95-2slfb" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.262240 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/818641a2-e0cf-406d-84b2-6a04ba68db5d-config\") pod \"controller-manager-7688687f95-2slfb\" (UID: \"818641a2-e0cf-406d-84b2-6a04ba68db5d\") " 
pod="openshift-controller-manager/controller-manager-7688687f95-2slfb" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.267693 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/818641a2-e0cf-406d-84b2-6a04ba68db5d-serving-cert\") pod \"controller-manager-7688687f95-2slfb\" (UID: \"818641a2-e0cf-406d-84b2-6a04ba68db5d\") " pod="openshift-controller-manager/controller-manager-7688687f95-2slfb" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.282553 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fh2kg\" (UniqueName: \"kubernetes.io/projected/818641a2-e0cf-406d-84b2-6a04ba68db5d-kube-api-access-fh2kg\") pod \"controller-manager-7688687f95-2slfb\" (UID: \"818641a2-e0cf-406d-84b2-6a04ba68db5d\") " pod="openshift-controller-manager/controller-manager-7688687f95-2slfb" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.365801 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-7688687f95-2slfb" Dec 05 01:16:13 crc kubenswrapper[4665]: I1205 01:16:13.829894 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-7688687f95-2slfb"] Dec 05 01:16:13 crc kubenswrapper[4665]: W1205 01:16:13.836731 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod818641a2_e0cf_406d_84b2_6a04ba68db5d.slice/crio-66dfb39dbaf809c26392044c9dbffaf89a704e88ede27425f09fc1a05d46e3de WatchSource:0}: Error finding container 66dfb39dbaf809c26392044c9dbffaf89a704e88ede27425f09fc1a05d46e3de: Status 404 returned error can't find the container with id 66dfb39dbaf809c26392044c9dbffaf89a704e88ede27425f09fc1a05d46e3de Dec 05 01:16:14 crc kubenswrapper[4665]: I1205 01:16:14.533439 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7688687f95-2slfb" event={"ID":"818641a2-e0cf-406d-84b2-6a04ba68db5d","Type":"ContainerStarted","Data":"20cbae5162dd2c7dbddd5a57b7569eb8169f0b3792b289e2e8a7ba66103aa3b6"} Dec 05 01:16:14 crc kubenswrapper[4665]: I1205 01:16:14.533492 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-7688687f95-2slfb" event={"ID":"818641a2-e0cf-406d-84b2-6a04ba68db5d","Type":"ContainerStarted","Data":"66dfb39dbaf809c26392044c9dbffaf89a704e88ede27425f09fc1a05d46e3de"} Dec 05 01:16:14 crc kubenswrapper[4665]: I1205 01:16:14.533619 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-7688687f95-2slfb" Dec 05 01:16:14 crc kubenswrapper[4665]: I1205 01:16:14.547790 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-7688687f95-2slfb" Dec 05 01:16:14 crc kubenswrapper[4665]: I1205 01:16:14.568463 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-7688687f95-2slfb" podStartSLOduration=3.568441001 podStartE2EDuration="3.568441001s" podCreationTimestamp="2025-12-05 01:16:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:16:14.559545679 +0000 UTC m=+349.898937978" watchObservedRunningTime="2025-12-05 01:16:14.568441001 +0000 UTC m=+349.907833300" Dec 05 01:16:14 crc 
kubenswrapper[4665]: I1205 01:16:14.922321 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:16:14 crc kubenswrapper[4665]: I1205 01:16:14.922659 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:16:15 crc kubenswrapper[4665]: I1205 01:16:15.857220 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-8vtrc"] Dec 05 01:16:15 crc kubenswrapper[4665]: I1205 01:16:15.858128 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:15 crc kubenswrapper[4665]: I1205 01:16:15.865213 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-8vtrc"] Dec 05 01:16:15 crc kubenswrapper[4665]: I1205 01:16:15.992249 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sx2wg\" (UniqueName: \"kubernetes.io/projected/2f7d7d93-d181-471d-bc00-e7301ce6264c-kube-api-access-sx2wg\") pod \"image-registry-66df7c8f76-8vtrc\" (UID: \"2f7d7d93-d181-471d-bc00-e7301ce6264c\") " pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:15 crc kubenswrapper[4665]: I1205 01:16:15.992313 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2f7d7d93-d181-471d-bc00-e7301ce6264c-bound-sa-token\") pod \"image-registry-66df7c8f76-8vtrc\" (UID: \"2f7d7d93-d181-471d-bc00-e7301ce6264c\") " pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:15 crc kubenswrapper[4665]: I1205 01:16:15.992344 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2f7d7d93-d181-471d-bc00-e7301ce6264c-registry-tls\") pod \"image-registry-66df7c8f76-8vtrc\" (UID: \"2f7d7d93-d181-471d-bc00-e7301ce6264c\") " pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:15 crc kubenswrapper[4665]: I1205 01:16:15.992373 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2f7d7d93-d181-471d-bc00-e7301ce6264c-trusted-ca\") pod \"image-registry-66df7c8f76-8vtrc\" (UID: \"2f7d7d93-d181-471d-bc00-e7301ce6264c\") " pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:15 crc kubenswrapper[4665]: I1205 01:16:15.992401 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2f7d7d93-d181-471d-bc00-e7301ce6264c-installation-pull-secrets\") pod \"image-registry-66df7c8f76-8vtrc\" (UID: \"2f7d7d93-d181-471d-bc00-e7301ce6264c\") " pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:15 crc kubenswrapper[4665]: I1205 01:16:15.992426 4665 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2f7d7d93-d181-471d-bc00-e7301ce6264c-registry-certificates\") pod \"image-registry-66df7c8f76-8vtrc\" (UID: \"2f7d7d93-d181-471d-bc00-e7301ce6264c\") " pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:15 crc kubenswrapper[4665]: I1205 01:16:15.992487 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2f7d7d93-d181-471d-bc00-e7301ce6264c-ca-trust-extracted\") pod \"image-registry-66df7c8f76-8vtrc\" (UID: \"2f7d7d93-d181-471d-bc00-e7301ce6264c\") " pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:15 crc kubenswrapper[4665]: I1205 01:16:15.992535 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-8vtrc\" (UID: \"2f7d7d93-d181-471d-bc00-e7301ce6264c\") " pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:16 crc kubenswrapper[4665]: I1205 01:16:16.014692 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-8vtrc\" (UID: \"2f7d7d93-d181-471d-bc00-e7301ce6264c\") " pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:16 crc kubenswrapper[4665]: I1205 01:16:16.093967 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2f7d7d93-d181-471d-bc00-e7301ce6264c-ca-trust-extracted\") pod \"image-registry-66df7c8f76-8vtrc\" (UID: \"2f7d7d93-d181-471d-bc00-e7301ce6264c\") " pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:16 crc kubenswrapper[4665]: I1205 01:16:16.094044 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sx2wg\" (UniqueName: \"kubernetes.io/projected/2f7d7d93-d181-471d-bc00-e7301ce6264c-kube-api-access-sx2wg\") pod \"image-registry-66df7c8f76-8vtrc\" (UID: \"2f7d7d93-d181-471d-bc00-e7301ce6264c\") " pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:16 crc kubenswrapper[4665]: I1205 01:16:16.094069 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2f7d7d93-d181-471d-bc00-e7301ce6264c-bound-sa-token\") pod \"image-registry-66df7c8f76-8vtrc\" (UID: \"2f7d7d93-d181-471d-bc00-e7301ce6264c\") " pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:16 crc kubenswrapper[4665]: I1205 01:16:16.094090 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2f7d7d93-d181-471d-bc00-e7301ce6264c-registry-tls\") pod \"image-registry-66df7c8f76-8vtrc\" (UID: \"2f7d7d93-d181-471d-bc00-e7301ce6264c\") " pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:16 crc kubenswrapper[4665]: I1205 01:16:16.094120 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/2f7d7d93-d181-471d-bc00-e7301ce6264c-trusted-ca\") pod \"image-registry-66df7c8f76-8vtrc\" (UID: \"2f7d7d93-d181-471d-bc00-e7301ce6264c\") " pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:16 crc kubenswrapper[4665]: I1205 01:16:16.094142 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2f7d7d93-d181-471d-bc00-e7301ce6264c-installation-pull-secrets\") pod \"image-registry-66df7c8f76-8vtrc\" (UID: \"2f7d7d93-d181-471d-bc00-e7301ce6264c\") " pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:16 crc kubenswrapper[4665]: I1205 01:16:16.094161 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2f7d7d93-d181-471d-bc00-e7301ce6264c-registry-certificates\") pod \"image-registry-66df7c8f76-8vtrc\" (UID: \"2f7d7d93-d181-471d-bc00-e7301ce6264c\") " pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:16 crc kubenswrapper[4665]: I1205 01:16:16.094420 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2f7d7d93-d181-471d-bc00-e7301ce6264c-ca-trust-extracted\") pod \"image-registry-66df7c8f76-8vtrc\" (UID: \"2f7d7d93-d181-471d-bc00-e7301ce6264c\") " pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:16 crc kubenswrapper[4665]: I1205 01:16:16.095494 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2f7d7d93-d181-471d-bc00-e7301ce6264c-registry-certificates\") pod \"image-registry-66df7c8f76-8vtrc\" (UID: \"2f7d7d93-d181-471d-bc00-e7301ce6264c\") " pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:16 crc kubenswrapper[4665]: I1205 01:16:16.095498 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2f7d7d93-d181-471d-bc00-e7301ce6264c-trusted-ca\") pod \"image-registry-66df7c8f76-8vtrc\" (UID: \"2f7d7d93-d181-471d-bc00-e7301ce6264c\") " pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:16 crc kubenswrapper[4665]: I1205 01:16:16.100070 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2f7d7d93-d181-471d-bc00-e7301ce6264c-installation-pull-secrets\") pod \"image-registry-66df7c8f76-8vtrc\" (UID: \"2f7d7d93-d181-471d-bc00-e7301ce6264c\") " pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:16 crc kubenswrapper[4665]: I1205 01:16:16.105227 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2f7d7d93-d181-471d-bc00-e7301ce6264c-registry-tls\") pod \"image-registry-66df7c8f76-8vtrc\" (UID: \"2f7d7d93-d181-471d-bc00-e7301ce6264c\") " pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:16 crc kubenswrapper[4665]: I1205 01:16:16.109981 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sx2wg\" (UniqueName: \"kubernetes.io/projected/2f7d7d93-d181-471d-bc00-e7301ce6264c-kube-api-access-sx2wg\") pod \"image-registry-66df7c8f76-8vtrc\" (UID: \"2f7d7d93-d181-471d-bc00-e7301ce6264c\") " pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:16 
crc kubenswrapper[4665]: I1205 01:16:16.112037 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2f7d7d93-d181-471d-bc00-e7301ce6264c-bound-sa-token\") pod \"image-registry-66df7c8f76-8vtrc\" (UID: \"2f7d7d93-d181-471d-bc00-e7301ce6264c\") " pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:16 crc kubenswrapper[4665]: I1205 01:16:16.173193 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:16 crc kubenswrapper[4665]: W1205 01:16:16.599921 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2f7d7d93_d181_471d_bc00_e7301ce6264c.slice/crio-537167842e63cf598e0791262801bd64e91fcfdefb5250c4343fe5ab0b34b657 WatchSource:0}: Error finding container 537167842e63cf598e0791262801bd64e91fcfdefb5250c4343fe5ab0b34b657: Status 404 returned error can't find the container with id 537167842e63cf598e0791262801bd64e91fcfdefb5250c4343fe5ab0b34b657 Dec 05 01:16:16 crc kubenswrapper[4665]: I1205 01:16:16.601079 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-8vtrc"] Dec 05 01:16:17 crc kubenswrapper[4665]: I1205 01:16:17.569070 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" event={"ID":"2f7d7d93-d181-471d-bc00-e7301ce6264c","Type":"ContainerStarted","Data":"bcff5defce43d66f24b61fa3e353b21e71ad38a642ee08fb3b4854e1ee706206"} Dec 05 01:16:17 crc kubenswrapper[4665]: I1205 01:16:17.570428 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" event={"ID":"2f7d7d93-d181-471d-bc00-e7301ce6264c","Type":"ContainerStarted","Data":"537167842e63cf598e0791262801bd64e91fcfdefb5250c4343fe5ab0b34b657"} Dec 05 01:16:17 crc kubenswrapper[4665]: I1205 01:16:17.570453 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:17 crc kubenswrapper[4665]: I1205 01:16:17.609425 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" podStartSLOduration=2.609396973 podStartE2EDuration="2.609396973s" podCreationTimestamp="2025-12-05 01:16:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:16:17.595843487 +0000 UTC m=+352.935235856" watchObservedRunningTime="2025-12-05 01:16:17.609396973 +0000 UTC m=+352.948789312" Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.404499 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2b4bm"] Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.405283 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2b4bm" podUID="cdf1b771-7028-4cbd-ae5f-23cdf3784ecd" containerName="registry-server" containerID="cri-o://367224c4e94d47d5aa5afbf1e02214e4a9cdb1869484ea0c593146d354e8144f" gracePeriod=30 Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.418422 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4tdbf"] Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.433021 4665 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tqfg7"] Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.433249 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" podUID="9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580" containerName="marketplace-operator" containerID="cri-o://dc69b7f66187e71118730b508972eb6af8b4f45f73c411022ec3b3557ce5ba64" gracePeriod=30 Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.447085 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pcrmz"] Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.447407 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pcrmz" podUID="06fcff7e-da06-4d77-abbf-361c5c23f666" containerName="registry-server" containerID="cri-o://9252797d687f184ea7261d6875b6da77873914400152bb515cb3b02b7bbc3cca" gracePeriod=30 Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.450705 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6zz57"] Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.451020 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6zz57" podUID="a5720325-40b1-49f2-a8a2-39dc7aef289a" containerName="registry-server" containerID="cri-o://b7cdf2673a8fb651c2a2a0d8d05fc5c3e4d160a4f371560efa2aad057a25a3df" gracePeriod=30 Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.456171 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6lq6k"] Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.458542 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6lq6k" Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.523605 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6lq6k"] Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.574898 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zh7x7\" (UniqueName: \"kubernetes.io/projected/8b497962-196d-41aa-aacc-1d68536dfec6-kube-api-access-zh7x7\") pod \"marketplace-operator-79b997595-6lq6k\" (UID: \"8b497962-196d-41aa-aacc-1d68536dfec6\") " pod="openshift-marketplace/marketplace-operator-79b997595-6lq6k" Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.575040 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8b497962-196d-41aa-aacc-1d68536dfec6-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6lq6k\" (UID: \"8b497962-196d-41aa-aacc-1d68536dfec6\") " pod="openshift-marketplace/marketplace-operator-79b997595-6lq6k" Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.575090 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8b497962-196d-41aa-aacc-1d68536dfec6-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6lq6k\" (UID: \"8b497962-196d-41aa-aacc-1d68536dfec6\") " pod="openshift-marketplace/marketplace-operator-79b997595-6lq6k" Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.676020 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zh7x7\" (UniqueName: \"kubernetes.io/projected/8b497962-196d-41aa-aacc-1d68536dfec6-kube-api-access-zh7x7\") pod \"marketplace-operator-79b997595-6lq6k\" (UID: \"8b497962-196d-41aa-aacc-1d68536dfec6\") " pod="openshift-marketplace/marketplace-operator-79b997595-6lq6k" Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.676104 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8b497962-196d-41aa-aacc-1d68536dfec6-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6lq6k\" (UID: \"8b497962-196d-41aa-aacc-1d68536dfec6\") " pod="openshift-marketplace/marketplace-operator-79b997595-6lq6k" Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.676127 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8b497962-196d-41aa-aacc-1d68536dfec6-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6lq6k\" (UID: \"8b497962-196d-41aa-aacc-1d68536dfec6\") " pod="openshift-marketplace/marketplace-operator-79b997595-6lq6k" Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.677618 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8b497962-196d-41aa-aacc-1d68536dfec6-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-6lq6k\" (UID: \"8b497962-196d-41aa-aacc-1d68536dfec6\") " pod="openshift-marketplace/marketplace-operator-79b997595-6lq6k" Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.684308 4665 generic.go:334] "Generic (PLEG): container finished" podID="a5720325-40b1-49f2-a8a2-39dc7aef289a" 
containerID="b7cdf2673a8fb651c2a2a0d8d05fc5c3e4d160a4f371560efa2aad057a25a3df" exitCode=0 Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.684383 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6zz57" event={"ID":"a5720325-40b1-49f2-a8a2-39dc7aef289a","Type":"ContainerDied","Data":"b7cdf2673a8fb651c2a2a0d8d05fc5c3e4d160a4f371560efa2aad057a25a3df"} Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.684987 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/8b497962-196d-41aa-aacc-1d68536dfec6-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-6lq6k\" (UID: \"8b497962-196d-41aa-aacc-1d68536dfec6\") " pod="openshift-marketplace/marketplace-operator-79b997595-6lq6k" Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.688023 4665 generic.go:334] "Generic (PLEG): container finished" podID="cdf1b771-7028-4cbd-ae5f-23cdf3784ecd" containerID="367224c4e94d47d5aa5afbf1e02214e4a9cdb1869484ea0c593146d354e8144f" exitCode=0 Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.688092 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2b4bm" event={"ID":"cdf1b771-7028-4cbd-ae5f-23cdf3784ecd","Type":"ContainerDied","Data":"367224c4e94d47d5aa5afbf1e02214e4a9cdb1869484ea0c593146d354e8144f"} Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.696314 4665 generic.go:334] "Generic (PLEG): container finished" podID="06fcff7e-da06-4d77-abbf-361c5c23f666" containerID="9252797d687f184ea7261d6875b6da77873914400152bb515cb3b02b7bbc3cca" exitCode=0 Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.696399 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcrmz" event={"ID":"06fcff7e-da06-4d77-abbf-361c5c23f666","Type":"ContainerDied","Data":"9252797d687f184ea7261d6875b6da77873914400152bb515cb3b02b7bbc3cca"} Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.699516 4665 generic.go:334] "Generic (PLEG): container finished" podID="9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580" containerID="dc69b7f66187e71118730b508972eb6af8b4f45f73c411022ec3b3557ce5ba64" exitCode=0 Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.699695 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-4tdbf" podUID="b37e5172-40b2-48e4-978e-cec68ac061e4" containerName="registry-server" containerID="cri-o://d76eb183f0062530880dc1bbd7aeb863e10a6ae3f9f96afea5dbc1ae5d06694a" gracePeriod=30 Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.699757 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" event={"ID":"9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580","Type":"ContainerDied","Data":"dc69b7f66187e71118730b508972eb6af8b4f45f73c411022ec3b3557ce5ba64"} Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.699783 4665 scope.go:117] "RemoveContainer" containerID="540f8a0062548c6c18d5e58fb57f2a1122bf8ca515ffd25804e473bcd88da918" Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.699922 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zh7x7\" (UniqueName: \"kubernetes.io/projected/8b497962-196d-41aa-aacc-1d68536dfec6-kube-api-access-zh7x7\") pod \"marketplace-operator-79b997595-6lq6k\" (UID: \"8b497962-196d-41aa-aacc-1d68536dfec6\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-6lq6k" Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.784242 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-6lq6k" Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.904263 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2b4bm" Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.980350 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cdf1b771-7028-4cbd-ae5f-23cdf3784ecd-utilities\") pod \"cdf1b771-7028-4cbd-ae5f-23cdf3784ecd\" (UID: \"cdf1b771-7028-4cbd-ae5f-23cdf3784ecd\") " Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.981148 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cdf1b771-7028-4cbd-ae5f-23cdf3784ecd-utilities" (OuterVolumeSpecName: "utilities") pod "cdf1b771-7028-4cbd-ae5f-23cdf3784ecd" (UID: "cdf1b771-7028-4cbd-ae5f-23cdf3784ecd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.981247 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cdf1b771-7028-4cbd-ae5f-23cdf3784ecd-catalog-content\") pod \"cdf1b771-7028-4cbd-ae5f-23cdf3784ecd\" (UID: \"cdf1b771-7028-4cbd-ae5f-23cdf3784ecd\") " Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.984549 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cdf1b771-7028-4cbd-ae5f-23cdf3784ecd-kube-api-access-wxzjf" (OuterVolumeSpecName: "kube-api-access-wxzjf") pod "cdf1b771-7028-4cbd-ae5f-23cdf3784ecd" (UID: "cdf1b771-7028-4cbd-ae5f-23cdf3784ecd"). InnerVolumeSpecName "kube-api-access-wxzjf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.981286 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxzjf\" (UniqueName: \"kubernetes.io/projected/cdf1b771-7028-4cbd-ae5f-23cdf3784ecd-kube-api-access-wxzjf\") pod \"cdf1b771-7028-4cbd-ae5f-23cdf3784ecd\" (UID: \"cdf1b771-7028-4cbd-ae5f-23cdf3784ecd\") " Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.995451 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cdf1b771-7028-4cbd-ae5f-23cdf3784ecd-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 01:16:34 crc kubenswrapper[4665]: I1205 01:16:34.995476 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxzjf\" (UniqueName: \"kubernetes.io/projected/cdf1b771-7028-4cbd-ae5f-23cdf3784ecd-kube-api-access-wxzjf\") on node \"crc\" DevicePath \"\"" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.086793 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cdf1b771-7028-4cbd-ae5f-23cdf3784ecd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cdf1b771-7028-4cbd-ae5f-23cdf3784ecd" (UID: "cdf1b771-7028-4cbd-ae5f-23cdf3784ecd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.096419 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cdf1b771-7028-4cbd-ae5f-23cdf3784ecd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.261658 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.295829 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6zz57" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.306987 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pcrmz" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.398673 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5720325-40b1-49f2-a8a2-39dc7aef289a-catalog-content\") pod \"a5720325-40b1-49f2-a8a2-39dc7aef289a\" (UID: \"a5720325-40b1-49f2-a8a2-39dc7aef289a\") " Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.398731 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580-marketplace-operator-metrics\") pod \"9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580\" (UID: \"9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580\") " Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.398769 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580-marketplace-trusted-ca\") pod \"9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580\" (UID: \"9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580\") " Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.398887 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4bm7q\" (UniqueName: \"kubernetes.io/projected/9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580-kube-api-access-4bm7q\") pod \"9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580\" (UID: \"9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580\") " Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.398914 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-msp4n\" (UniqueName: \"kubernetes.io/projected/06fcff7e-da06-4d77-abbf-361c5c23f666-kube-api-access-msp4n\") pod \"06fcff7e-da06-4d77-abbf-361c5c23f666\" (UID: \"06fcff7e-da06-4d77-abbf-361c5c23f666\") " Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.398945 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5720325-40b1-49f2-a8a2-39dc7aef289a-utilities\") pod \"a5720325-40b1-49f2-a8a2-39dc7aef289a\" (UID: \"a5720325-40b1-49f2-a8a2-39dc7aef289a\") " Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.398982 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06fcff7e-da06-4d77-abbf-361c5c23f666-catalog-content\") pod \"06fcff7e-da06-4d77-abbf-361c5c23f666\" (UID: \"06fcff7e-da06-4d77-abbf-361c5c23f666\") " Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.399018 4665 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06fcff7e-da06-4d77-abbf-361c5c23f666-utilities\") pod \"06fcff7e-da06-4d77-abbf-361c5c23f666\" (UID: \"06fcff7e-da06-4d77-abbf-361c5c23f666\") " Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.399042 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z9lxw\" (UniqueName: \"kubernetes.io/projected/a5720325-40b1-49f2-a8a2-39dc7aef289a-kube-api-access-z9lxw\") pod \"a5720325-40b1-49f2-a8a2-39dc7aef289a\" (UID: \"a5720325-40b1-49f2-a8a2-39dc7aef289a\") " Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.399554 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580" (UID: "9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.399694 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a5720325-40b1-49f2-a8a2-39dc7aef289a-utilities" (OuterVolumeSpecName: "utilities") pod "a5720325-40b1-49f2-a8a2-39dc7aef289a" (UID: "a5720325-40b1-49f2-a8a2-39dc7aef289a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.400221 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06fcff7e-da06-4d77-abbf-361c5c23f666-utilities" (OuterVolumeSpecName: "utilities") pod "06fcff7e-da06-4d77-abbf-361c5c23f666" (UID: "06fcff7e-da06-4d77-abbf-361c5c23f666"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.402593 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580-kube-api-access-4bm7q" (OuterVolumeSpecName: "kube-api-access-4bm7q") pod "9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580" (UID: "9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580"). InnerVolumeSpecName "kube-api-access-4bm7q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.403078 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06fcff7e-da06-4d77-abbf-361c5c23f666-kube-api-access-msp4n" (OuterVolumeSpecName: "kube-api-access-msp4n") pod "06fcff7e-da06-4d77-abbf-361c5c23f666" (UID: "06fcff7e-da06-4d77-abbf-361c5c23f666"). InnerVolumeSpecName "kube-api-access-msp4n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.403356 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580" (UID: "9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.403909 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5720325-40b1-49f2-a8a2-39dc7aef289a-kube-api-access-z9lxw" (OuterVolumeSpecName: "kube-api-access-z9lxw") pod "a5720325-40b1-49f2-a8a2-39dc7aef289a" (UID: "a5720325-40b1-49f2-a8a2-39dc7aef289a"). InnerVolumeSpecName "kube-api-access-z9lxw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.426234 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/06fcff7e-da06-4d77-abbf-361c5c23f666-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "06fcff7e-da06-4d77-abbf-361c5c23f666" (UID: "06fcff7e-da06-4d77-abbf-361c5c23f666"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.453490 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-6lq6k"] Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.500897 4665 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.500929 4665 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.500938 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4bm7q\" (UniqueName: \"kubernetes.io/projected/9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580-kube-api-access-4bm7q\") on node \"crc\" DevicePath \"\"" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.500946 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-msp4n\" (UniqueName: \"kubernetes.io/projected/06fcff7e-da06-4d77-abbf-361c5c23f666-kube-api-access-msp4n\") on node \"crc\" DevicePath \"\"" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.500956 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a5720325-40b1-49f2-a8a2-39dc7aef289a-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.500965 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/06fcff7e-da06-4d77-abbf-361c5c23f666-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.500973 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/06fcff7e-da06-4d77-abbf-361c5c23f666-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.500981 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z9lxw\" (UniqueName: \"kubernetes.io/projected/a5720325-40b1-49f2-a8a2-39dc7aef289a-kube-api-access-z9lxw\") on node \"crc\" DevicePath \"\"" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.551884 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/a5720325-40b1-49f2-a8a2-39dc7aef289a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a5720325-40b1-49f2-a8a2-39dc7aef289a" (UID: "a5720325-40b1-49f2-a8a2-39dc7aef289a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.601860 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a5720325-40b1-49f2-a8a2-39dc7aef289a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.629743 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4tdbf" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.702434 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b37e5172-40b2-48e4-978e-cec68ac061e4-catalog-content\") pod \"b37e5172-40b2-48e4-978e-cec68ac061e4\" (UID: \"b37e5172-40b2-48e4-978e-cec68ac061e4\") " Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.702539 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b37e5172-40b2-48e4-978e-cec68ac061e4-utilities\") pod \"b37e5172-40b2-48e4-978e-cec68ac061e4\" (UID: \"b37e5172-40b2-48e4-978e-cec68ac061e4\") " Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.702572 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8dwmv\" (UniqueName: \"kubernetes.io/projected/b37e5172-40b2-48e4-978e-cec68ac061e4-kube-api-access-8dwmv\") pod \"b37e5172-40b2-48e4-978e-cec68ac061e4\" (UID: \"b37e5172-40b2-48e4-978e-cec68ac061e4\") " Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.704255 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b37e5172-40b2-48e4-978e-cec68ac061e4-utilities" (OuterVolumeSpecName: "utilities") pod "b37e5172-40b2-48e4-978e-cec68ac061e4" (UID: "b37e5172-40b2-48e4-978e-cec68ac061e4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.718440 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6lq6k" event={"ID":"8b497962-196d-41aa-aacc-1d68536dfec6","Type":"ContainerStarted","Data":"ef9479f5c597673449a370e4f1bf5342ad9af835caf1aa5613d382a227700734"} Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.722521 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2b4bm" event={"ID":"cdf1b771-7028-4cbd-ae5f-23cdf3784ecd","Type":"ContainerDied","Data":"922548825f0afce7250641cf7bf615824e4e62bfc4ce607e444d90fc1a4d0ec9"} Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.722584 4665 scope.go:117] "RemoveContainer" containerID="367224c4e94d47d5aa5afbf1e02214e4a9cdb1869484ea0c593146d354e8144f" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.722974 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2b4bm" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.733468 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b37e5172-40b2-48e4-978e-cec68ac061e4-kube-api-access-8dwmv" (OuterVolumeSpecName: "kube-api-access-8dwmv") pod "b37e5172-40b2-48e4-978e-cec68ac061e4" (UID: "b37e5172-40b2-48e4-978e-cec68ac061e4"). InnerVolumeSpecName "kube-api-access-8dwmv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.734593 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pcrmz" event={"ID":"06fcff7e-da06-4d77-abbf-361c5c23f666","Type":"ContainerDied","Data":"b1bea4597696dd357ce2af50e66bffc3932b69fdc86030db48ec5747f1a23ae2"} Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.734704 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pcrmz" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.760524 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-4tdbf" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.760822 4665 generic.go:334] "Generic (PLEG): container finished" podID="b37e5172-40b2-48e4-978e-cec68ac061e4" containerID="d76eb183f0062530880dc1bbd7aeb863e10a6ae3f9f96afea5dbc1ae5d06694a" exitCode=0 Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.761001 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tdbf" event={"ID":"b37e5172-40b2-48e4-978e-cec68ac061e4","Type":"ContainerDied","Data":"d76eb183f0062530880dc1bbd7aeb863e10a6ae3f9f96afea5dbc1ae5d06694a"} Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.761369 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-4tdbf" event={"ID":"b37e5172-40b2-48e4-978e-cec68ac061e4","Type":"ContainerDied","Data":"76e05a8a45f493dea40f57b0b61813030d0cf682d28f4739218903f1708bd02d"} Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.767580 4665 scope.go:117] "RemoveContainer" containerID="e74f541e759e3582e8398b9c46fa32df257c731961dd2c4e5f5314c8c3e81d22" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.769140 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" event={"ID":"9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580","Type":"ContainerDied","Data":"c9242d3d8e3e6b80420ce349c51306f8e43b54136c80ac3a06bd298be8456433"} Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.769372 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-tqfg7" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.775208 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2b4bm"] Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.790005 4665 scope.go:117] "RemoveContainer" containerID="6d3765fcaf6e1a9c5a3dd09ef162226d586567679a26c3766b9b8d4eefad520d" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.791386 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6zz57" event={"ID":"a5720325-40b1-49f2-a8a2-39dc7aef289a","Type":"ContainerDied","Data":"fa932e1a96bd2ff5799f6dfc85730bdbb2c59eb410f1dde1320bd44186a14e35"} Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.791490 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6zz57" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.794742 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2b4bm"] Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.803349 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8dwmv\" (UniqueName: \"kubernetes.io/projected/b37e5172-40b2-48e4-978e-cec68ac061e4-kube-api-access-8dwmv\") on node \"crc\" DevicePath \"\"" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.804029 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b37e5172-40b2-48e4-978e-cec68ac061e4-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.807648 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pcrmz"] Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.810288 4665 scope.go:117] "RemoveContainer" containerID="9252797d687f184ea7261d6875b6da77873914400152bb515cb3b02b7bbc3cca" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.821277 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pcrmz"] Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.824763 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tqfg7"] Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.826187 4665 scope.go:117] "RemoveContainer" containerID="92dbf87c208d5758a822fe4db6bda572668b806c7d26ae377bd36227832ac28d" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.828581 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tqfg7"] Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.833029 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6zz57"] Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.837244 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6zz57"] Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.838190 4665 scope.go:117] "RemoveContainer" containerID="d6c237fb62fde4e580aaeb150f1f1ebbd873178e93efbd15aa592e0179c9edd9" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.839563 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b37e5172-40b2-48e4-978e-cec68ac061e4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod 
"b37e5172-40b2-48e4-978e-cec68ac061e4" (UID: "b37e5172-40b2-48e4-978e-cec68ac061e4"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.850483 4665 scope.go:117] "RemoveContainer" containerID="d76eb183f0062530880dc1bbd7aeb863e10a6ae3f9f96afea5dbc1ae5d06694a" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.863870 4665 scope.go:117] "RemoveContainer" containerID="1cfbee8a71b6e41942fae29639fa8e461b7568defd2c3d0827690980fb7338fe" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.876269 4665 scope.go:117] "RemoveContainer" containerID="a39a6f15cb10e181ae7e37ecfc8d688759591517d937ad425613ce792f8d0b03" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.887925 4665 scope.go:117] "RemoveContainer" containerID="d76eb183f0062530880dc1bbd7aeb863e10a6ae3f9f96afea5dbc1ae5d06694a" Dec 05 01:16:35 crc kubenswrapper[4665]: E1205 01:16:35.888248 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d76eb183f0062530880dc1bbd7aeb863e10a6ae3f9f96afea5dbc1ae5d06694a\": container with ID starting with d76eb183f0062530880dc1bbd7aeb863e10a6ae3f9f96afea5dbc1ae5d06694a not found: ID does not exist" containerID="d76eb183f0062530880dc1bbd7aeb863e10a6ae3f9f96afea5dbc1ae5d06694a" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.888327 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d76eb183f0062530880dc1bbd7aeb863e10a6ae3f9f96afea5dbc1ae5d06694a"} err="failed to get container status \"d76eb183f0062530880dc1bbd7aeb863e10a6ae3f9f96afea5dbc1ae5d06694a\": rpc error: code = NotFound desc = could not find container \"d76eb183f0062530880dc1bbd7aeb863e10a6ae3f9f96afea5dbc1ae5d06694a\": container with ID starting with d76eb183f0062530880dc1bbd7aeb863e10a6ae3f9f96afea5dbc1ae5d06694a not found: ID does not exist" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.888351 4665 scope.go:117] "RemoveContainer" containerID="1cfbee8a71b6e41942fae29639fa8e461b7568defd2c3d0827690980fb7338fe" Dec 05 01:16:35 crc kubenswrapper[4665]: E1205 01:16:35.888634 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1cfbee8a71b6e41942fae29639fa8e461b7568defd2c3d0827690980fb7338fe\": container with ID starting with 1cfbee8a71b6e41942fae29639fa8e461b7568defd2c3d0827690980fb7338fe not found: ID does not exist" containerID="1cfbee8a71b6e41942fae29639fa8e461b7568defd2c3d0827690980fb7338fe" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.888665 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1cfbee8a71b6e41942fae29639fa8e461b7568defd2c3d0827690980fb7338fe"} err="failed to get container status \"1cfbee8a71b6e41942fae29639fa8e461b7568defd2c3d0827690980fb7338fe\": rpc error: code = NotFound desc = could not find container \"1cfbee8a71b6e41942fae29639fa8e461b7568defd2c3d0827690980fb7338fe\": container with ID starting with 1cfbee8a71b6e41942fae29639fa8e461b7568defd2c3d0827690980fb7338fe not found: ID does not exist" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.888688 4665 scope.go:117] "RemoveContainer" containerID="a39a6f15cb10e181ae7e37ecfc8d688759591517d937ad425613ce792f8d0b03" Dec 05 01:16:35 crc kubenswrapper[4665]: E1205 01:16:35.888931 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find 
container \"a39a6f15cb10e181ae7e37ecfc8d688759591517d937ad425613ce792f8d0b03\": container with ID starting with a39a6f15cb10e181ae7e37ecfc8d688759591517d937ad425613ce792f8d0b03 not found: ID does not exist" containerID="a39a6f15cb10e181ae7e37ecfc8d688759591517d937ad425613ce792f8d0b03" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.888960 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a39a6f15cb10e181ae7e37ecfc8d688759591517d937ad425613ce792f8d0b03"} err="failed to get container status \"a39a6f15cb10e181ae7e37ecfc8d688759591517d937ad425613ce792f8d0b03\": rpc error: code = NotFound desc = could not find container \"a39a6f15cb10e181ae7e37ecfc8d688759591517d937ad425613ce792f8d0b03\": container with ID starting with a39a6f15cb10e181ae7e37ecfc8d688759591517d937ad425613ce792f8d0b03 not found: ID does not exist" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.888978 4665 scope.go:117] "RemoveContainer" containerID="dc69b7f66187e71118730b508972eb6af8b4f45f73c411022ec3b3557ce5ba64" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.898826 4665 scope.go:117] "RemoveContainer" containerID="b7cdf2673a8fb651c2a2a0d8d05fc5c3e4d160a4f371560efa2aad057a25a3df" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.904677 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b37e5172-40b2-48e4-978e-cec68ac061e4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.909212 4665 scope.go:117] "RemoveContainer" containerID="f399cd8f30d94b108d29790c5fc100a5129f7440dba4b7e8cef529009d29936d" Dec 05 01:16:35 crc kubenswrapper[4665]: I1205 01:16:35.922669 4665 scope.go:117] "RemoveContainer" containerID="20d0e4edd86a98e76b4a6cdaabc818cc45a774e03c870d685ec2857a5c401a56" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.086409 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-4tdbf"] Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.091045 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-4tdbf"] Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.179987 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-8vtrc" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.240600 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xbw2d"] Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.626620 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ldv79"] Dec 05 01:16:36 crc kubenswrapper[4665]: E1205 01:16:36.626910 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b37e5172-40b2-48e4-978e-cec68ac061e4" containerName="registry-server" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.626930 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="b37e5172-40b2-48e4-978e-cec68ac061e4" containerName="registry-server" Dec 05 01:16:36 crc kubenswrapper[4665]: E1205 01:16:36.626954 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdf1b771-7028-4cbd-ae5f-23cdf3784ecd" containerName="extract-utilities" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.626967 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdf1b771-7028-4cbd-ae5f-23cdf3784ecd" 
containerName="extract-utilities" Dec 05 01:16:36 crc kubenswrapper[4665]: E1205 01:16:36.626986 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b37e5172-40b2-48e4-978e-cec68ac061e4" containerName="extract-content" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.626999 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="b37e5172-40b2-48e4-978e-cec68ac061e4" containerName="extract-content" Dec 05 01:16:36 crc kubenswrapper[4665]: E1205 01:16:36.627016 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580" containerName="marketplace-operator" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.627028 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580" containerName="marketplace-operator" Dec 05 01:16:36 crc kubenswrapper[4665]: E1205 01:16:36.627044 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580" containerName="marketplace-operator" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.627056 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580" containerName="marketplace-operator" Dec 05 01:16:36 crc kubenswrapper[4665]: E1205 01:16:36.627069 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5720325-40b1-49f2-a8a2-39dc7aef289a" containerName="extract-content" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.627081 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5720325-40b1-49f2-a8a2-39dc7aef289a" containerName="extract-content" Dec 05 01:16:36 crc kubenswrapper[4665]: E1205 01:16:36.627098 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdf1b771-7028-4cbd-ae5f-23cdf3784ecd" containerName="extract-content" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.627110 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdf1b771-7028-4cbd-ae5f-23cdf3784ecd" containerName="extract-content" Dec 05 01:16:36 crc kubenswrapper[4665]: E1205 01:16:36.627128 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cdf1b771-7028-4cbd-ae5f-23cdf3784ecd" containerName="registry-server" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.627140 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="cdf1b771-7028-4cbd-ae5f-23cdf3784ecd" containerName="registry-server" Dec 05 01:16:36 crc kubenswrapper[4665]: E1205 01:16:36.627157 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06fcff7e-da06-4d77-abbf-361c5c23f666" containerName="extract-content" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.627169 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="06fcff7e-da06-4d77-abbf-361c5c23f666" containerName="extract-content" Dec 05 01:16:36 crc kubenswrapper[4665]: E1205 01:16:36.627183 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06fcff7e-da06-4d77-abbf-361c5c23f666" containerName="extract-utilities" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.627197 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="06fcff7e-da06-4d77-abbf-361c5c23f666" containerName="extract-utilities" Dec 05 01:16:36 crc kubenswrapper[4665]: E1205 01:16:36.627216 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b37e5172-40b2-48e4-978e-cec68ac061e4" containerName="extract-utilities" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.627228 4665 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="b37e5172-40b2-48e4-978e-cec68ac061e4" containerName="extract-utilities" Dec 05 01:16:36 crc kubenswrapper[4665]: E1205 01:16:36.627251 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5720325-40b1-49f2-a8a2-39dc7aef289a" containerName="registry-server" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.627262 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5720325-40b1-49f2-a8a2-39dc7aef289a" containerName="registry-server" Dec 05 01:16:36 crc kubenswrapper[4665]: E1205 01:16:36.627279 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06fcff7e-da06-4d77-abbf-361c5c23f666" containerName="registry-server" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.627292 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="06fcff7e-da06-4d77-abbf-361c5c23f666" containerName="registry-server" Dec 05 01:16:36 crc kubenswrapper[4665]: E1205 01:16:36.627333 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5720325-40b1-49f2-a8a2-39dc7aef289a" containerName="extract-utilities" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.627345 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5720325-40b1-49f2-a8a2-39dc7aef289a" containerName="extract-utilities" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.627524 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580" containerName="marketplace-operator" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.627549 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="b37e5172-40b2-48e4-978e-cec68ac061e4" containerName="registry-server" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.627562 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5720325-40b1-49f2-a8a2-39dc7aef289a" containerName="registry-server" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.627575 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="06fcff7e-da06-4d77-abbf-361c5c23f666" containerName="registry-server" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.627600 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580" containerName="marketplace-operator" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.627618 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="cdf1b771-7028-4cbd-ae5f-23cdf3784ecd" containerName="registry-server" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.628780 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ldv79" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.638804 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.646990 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ldv79"] Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.715908 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f4973ad-12ad-421c-b68f-9b47206f2e2e-catalog-content\") pod \"redhat-marketplace-ldv79\" (UID: \"7f4973ad-12ad-421c-b68f-9b47206f2e2e\") " pod="openshift-marketplace/redhat-marketplace-ldv79" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.715969 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f4973ad-12ad-421c-b68f-9b47206f2e2e-utilities\") pod \"redhat-marketplace-ldv79\" (UID: \"7f4973ad-12ad-421c-b68f-9b47206f2e2e\") " pod="openshift-marketplace/redhat-marketplace-ldv79" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.716006 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grp9k\" (UniqueName: \"kubernetes.io/projected/7f4973ad-12ad-421c-b68f-9b47206f2e2e-kube-api-access-grp9k\") pod \"redhat-marketplace-ldv79\" (UID: \"7f4973ad-12ad-421c-b68f-9b47206f2e2e\") " pod="openshift-marketplace/redhat-marketplace-ldv79" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.812273 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-6lq6k" event={"ID":"8b497962-196d-41aa-aacc-1d68536dfec6","Type":"ContainerStarted","Data":"4358fd8f82cb82ca0a5e6d5fc7551dbd1451165f9948e8cf66fb2847b5c7355b"} Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.813442 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-6lq6k" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.814329 4665 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-6lq6k container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.64:8080/healthz\": dial tcp 10.217.0.64:8080: connect: connection refused" start-of-body= Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.814358 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-6lq6k" podUID="8b497962-196d-41aa-aacc-1d68536dfec6" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.64:8080/healthz\": dial tcp 10.217.0.64:8080: connect: connection refused" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.817005 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f4973ad-12ad-421c-b68f-9b47206f2e2e-utilities\") pod \"redhat-marketplace-ldv79\" (UID: \"7f4973ad-12ad-421c-b68f-9b47206f2e2e\") " pod="openshift-marketplace/redhat-marketplace-ldv79" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.817049 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grp9k\" (UniqueName: 
\"kubernetes.io/projected/7f4973ad-12ad-421c-b68f-9b47206f2e2e-kube-api-access-grp9k\") pod \"redhat-marketplace-ldv79\" (UID: \"7f4973ad-12ad-421c-b68f-9b47206f2e2e\") " pod="openshift-marketplace/redhat-marketplace-ldv79" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.817098 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f4973ad-12ad-421c-b68f-9b47206f2e2e-catalog-content\") pod \"redhat-marketplace-ldv79\" (UID: \"7f4973ad-12ad-421c-b68f-9b47206f2e2e\") " pod="openshift-marketplace/redhat-marketplace-ldv79" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.817520 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7f4973ad-12ad-421c-b68f-9b47206f2e2e-utilities\") pod \"redhat-marketplace-ldv79\" (UID: \"7f4973ad-12ad-421c-b68f-9b47206f2e2e\") " pod="openshift-marketplace/redhat-marketplace-ldv79" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.817538 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7f4973ad-12ad-421c-b68f-9b47206f2e2e-catalog-content\") pod \"redhat-marketplace-ldv79\" (UID: \"7f4973ad-12ad-421c-b68f-9b47206f2e2e\") " pod="openshift-marketplace/redhat-marketplace-ldv79" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.823756 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tlrk9"] Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.824732 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tlrk9" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.826712 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.838535 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-6lq6k" podStartSLOduration=2.838512248 podStartE2EDuration="2.838512248s" podCreationTimestamp="2025-12-05 01:16:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:16:36.830869779 +0000 UTC m=+372.170262098" watchObservedRunningTime="2025-12-05 01:16:36.838512248 +0000 UTC m=+372.177904547" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.843323 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-grp9k\" (UniqueName: \"kubernetes.io/projected/7f4973ad-12ad-421c-b68f-9b47206f2e2e-kube-api-access-grp9k\") pod \"redhat-marketplace-ldv79\" (UID: \"7f4973ad-12ad-421c-b68f-9b47206f2e2e\") " pod="openshift-marketplace/redhat-marketplace-ldv79" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.846489 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tlrk9"] Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.898973 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06fcff7e-da06-4d77-abbf-361c5c23f666" path="/var/lib/kubelet/pods/06fcff7e-da06-4d77-abbf-361c5c23f666/volumes" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.899801 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580" 
path="/var/lib/kubelet/pods/9a9ed7fd-cb93-448a-a3b2-7c02e4d5a580/volumes" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.900259 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5720325-40b1-49f2-a8a2-39dc7aef289a" path="/var/lib/kubelet/pods/a5720325-40b1-49f2-a8a2-39dc7aef289a/volumes" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.901285 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b37e5172-40b2-48e4-978e-cec68ac061e4" path="/var/lib/kubelet/pods/b37e5172-40b2-48e4-978e-cec68ac061e4/volumes" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.901889 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cdf1b771-7028-4cbd-ae5f-23cdf3784ecd" path="/var/lib/kubelet/pods/cdf1b771-7028-4cbd-ae5f-23cdf3784ecd/volumes" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.918369 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e6ec612-5d6b-4431-ba68-690a5c6c9c2a-catalog-content\") pod \"redhat-operators-tlrk9\" (UID: \"3e6ec612-5d6b-4431-ba68-690a5c6c9c2a\") " pod="openshift-marketplace/redhat-operators-tlrk9" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.918427 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e6ec612-5d6b-4431-ba68-690a5c6c9c2a-utilities\") pod \"redhat-operators-tlrk9\" (UID: \"3e6ec612-5d6b-4431-ba68-690a5c6c9c2a\") " pod="openshift-marketplace/redhat-operators-tlrk9" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.918469 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhfdb\" (UniqueName: \"kubernetes.io/projected/3e6ec612-5d6b-4431-ba68-690a5c6c9c2a-kube-api-access-vhfdb\") pod \"redhat-operators-tlrk9\" (UID: \"3e6ec612-5d6b-4431-ba68-690a5c6c9c2a\") " pod="openshift-marketplace/redhat-operators-tlrk9" Dec 05 01:16:36 crc kubenswrapper[4665]: I1205 01:16:36.949613 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ldv79" Dec 05 01:16:37 crc kubenswrapper[4665]: I1205 01:16:37.019751 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e6ec612-5d6b-4431-ba68-690a5c6c9c2a-catalog-content\") pod \"redhat-operators-tlrk9\" (UID: \"3e6ec612-5d6b-4431-ba68-690a5c6c9c2a\") " pod="openshift-marketplace/redhat-operators-tlrk9" Dec 05 01:16:37 crc kubenswrapper[4665]: I1205 01:16:37.020025 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e6ec612-5d6b-4431-ba68-690a5c6c9c2a-utilities\") pod \"redhat-operators-tlrk9\" (UID: \"3e6ec612-5d6b-4431-ba68-690a5c6c9c2a\") " pod="openshift-marketplace/redhat-operators-tlrk9" Dec 05 01:16:37 crc kubenswrapper[4665]: I1205 01:16:37.020056 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhfdb\" (UniqueName: \"kubernetes.io/projected/3e6ec612-5d6b-4431-ba68-690a5c6c9c2a-kube-api-access-vhfdb\") pod \"redhat-operators-tlrk9\" (UID: \"3e6ec612-5d6b-4431-ba68-690a5c6c9c2a\") " pod="openshift-marketplace/redhat-operators-tlrk9" Dec 05 01:16:37 crc kubenswrapper[4665]: I1205 01:16:37.020818 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3e6ec612-5d6b-4431-ba68-690a5c6c9c2a-catalog-content\") pod \"redhat-operators-tlrk9\" (UID: \"3e6ec612-5d6b-4431-ba68-690a5c6c9c2a\") " pod="openshift-marketplace/redhat-operators-tlrk9" Dec 05 01:16:37 crc kubenswrapper[4665]: I1205 01:16:37.021097 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3e6ec612-5d6b-4431-ba68-690a5c6c9c2a-utilities\") pod \"redhat-operators-tlrk9\" (UID: \"3e6ec612-5d6b-4431-ba68-690a5c6c9c2a\") " pod="openshift-marketplace/redhat-operators-tlrk9" Dec 05 01:16:37 crc kubenswrapper[4665]: I1205 01:16:37.048871 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhfdb\" (UniqueName: \"kubernetes.io/projected/3e6ec612-5d6b-4431-ba68-690a5c6c9c2a-kube-api-access-vhfdb\") pod \"redhat-operators-tlrk9\" (UID: \"3e6ec612-5d6b-4431-ba68-690a5c6c9c2a\") " pod="openshift-marketplace/redhat-operators-tlrk9" Dec 05 01:16:37 crc kubenswrapper[4665]: I1205 01:16:37.143638 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tlrk9" Dec 05 01:16:37 crc kubenswrapper[4665]: W1205 01:16:37.342407 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f4973ad_12ad_421c_b68f_9b47206f2e2e.slice/crio-3a775c8e9727143233e8ad82fb4505158696c28fc8f18df6e061e5789197cad8 WatchSource:0}: Error finding container 3a775c8e9727143233e8ad82fb4505158696c28fc8f18df6e061e5789197cad8: Status 404 returned error can't find the container with id 3a775c8e9727143233e8ad82fb4505158696c28fc8f18df6e061e5789197cad8 Dec 05 01:16:37 crc kubenswrapper[4665]: I1205 01:16:37.346440 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ldv79"] Dec 05 01:16:37 crc kubenswrapper[4665]: I1205 01:16:37.529657 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tlrk9"] Dec 05 01:16:37 crc kubenswrapper[4665]: W1205 01:16:37.534338 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3e6ec612_5d6b_4431_ba68_690a5c6c9c2a.slice/crio-3ed9dc3ac107f99b3b38c54335cec63acace39a479ad3011c90d5ee86f8aeb2c WatchSource:0}: Error finding container 3ed9dc3ac107f99b3b38c54335cec63acace39a479ad3011c90d5ee86f8aeb2c: Status 404 returned error can't find the container with id 3ed9dc3ac107f99b3b38c54335cec63acace39a479ad3011c90d5ee86f8aeb2c Dec 05 01:16:37 crc kubenswrapper[4665]: I1205 01:16:37.830857 4665 generic.go:334] "Generic (PLEG): container finished" podID="7f4973ad-12ad-421c-b68f-9b47206f2e2e" containerID="1b71197b86ac9e7b88237dfb10e998d04772be9de575c531e7034326ade6a35a" exitCode=0 Dec 05 01:16:37 crc kubenswrapper[4665]: I1205 01:16:37.830970 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ldv79" event={"ID":"7f4973ad-12ad-421c-b68f-9b47206f2e2e","Type":"ContainerDied","Data":"1b71197b86ac9e7b88237dfb10e998d04772be9de575c531e7034326ade6a35a"} Dec 05 01:16:37 crc kubenswrapper[4665]: I1205 01:16:37.831030 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ldv79" event={"ID":"7f4973ad-12ad-421c-b68f-9b47206f2e2e","Type":"ContainerStarted","Data":"3a775c8e9727143233e8ad82fb4505158696c28fc8f18df6e061e5789197cad8"} Dec 05 01:16:37 crc kubenswrapper[4665]: I1205 01:16:37.833506 4665 generic.go:334] "Generic (PLEG): container finished" podID="3e6ec612-5d6b-4431-ba68-690a5c6c9c2a" containerID="3a66c2102b55497db0db0b5dce5b0f38820ac584544f0858fbcec3fad93b03f4" exitCode=0 Dec 05 01:16:37 crc kubenswrapper[4665]: I1205 01:16:37.833603 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tlrk9" event={"ID":"3e6ec612-5d6b-4431-ba68-690a5c6c9c2a","Type":"ContainerDied","Data":"3a66c2102b55497db0db0b5dce5b0f38820ac584544f0858fbcec3fad93b03f4"} Dec 05 01:16:37 crc kubenswrapper[4665]: I1205 01:16:37.836838 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tlrk9" event={"ID":"3e6ec612-5d6b-4431-ba68-690a5c6c9c2a","Type":"ContainerStarted","Data":"3ed9dc3ac107f99b3b38c54335cec63acace39a479ad3011c90d5ee86f8aeb2c"} Dec 05 01:16:37 crc kubenswrapper[4665]: I1205 01:16:37.839731 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-6lq6k" Dec 05 01:16:38 crc kubenswrapper[4665]: I1205 01:16:38.840363 
4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tlrk9" event={"ID":"3e6ec612-5d6b-4431-ba68-690a5c6c9c2a","Type":"ContainerStarted","Data":"182f893fc40822bf085555773e9e5c66fe5e207068a72b53610c27ebfc178df4"} Dec 05 01:16:38 crc kubenswrapper[4665]: I1205 01:16:38.842367 4665 generic.go:334] "Generic (PLEG): container finished" podID="7f4973ad-12ad-421c-b68f-9b47206f2e2e" containerID="9ab9776d6411657b9e7392cbb85eaa4ac5138965cd3ca0e565b0f32e1165e200" exitCode=0 Dec 05 01:16:38 crc kubenswrapper[4665]: I1205 01:16:38.843443 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ldv79" event={"ID":"7f4973ad-12ad-421c-b68f-9b47206f2e2e","Type":"ContainerDied","Data":"9ab9776d6411657b9e7392cbb85eaa4ac5138965cd3ca0e565b0f32e1165e200"} Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.025459 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-pkwjd"] Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.026827 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-pkwjd" Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.031557 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.035931 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pkwjd"] Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.148923 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0995d7c1-230b-46a5-9136-c644fa9faf86-utilities\") pod \"certified-operators-pkwjd\" (UID: \"0995d7c1-230b-46a5-9136-c644fa9faf86\") " pod="openshift-marketplace/certified-operators-pkwjd" Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.149007 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kpq62\" (UniqueName: \"kubernetes.io/projected/0995d7c1-230b-46a5-9136-c644fa9faf86-kube-api-access-kpq62\") pod \"certified-operators-pkwjd\" (UID: \"0995d7c1-230b-46a5-9136-c644fa9faf86\") " pod="openshift-marketplace/certified-operators-pkwjd" Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.149053 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0995d7c1-230b-46a5-9136-c644fa9faf86-catalog-content\") pod \"certified-operators-pkwjd\" (UID: \"0995d7c1-230b-46a5-9136-c644fa9faf86\") " pod="openshift-marketplace/certified-operators-pkwjd" Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.241507 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9dz8h"] Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.243345 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9dz8h" Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.246415 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.250477 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0995d7c1-230b-46a5-9136-c644fa9faf86-utilities\") pod \"certified-operators-pkwjd\" (UID: \"0995d7c1-230b-46a5-9136-c644fa9faf86\") " pod="openshift-marketplace/certified-operators-pkwjd" Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.250542 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kpq62\" (UniqueName: \"kubernetes.io/projected/0995d7c1-230b-46a5-9136-c644fa9faf86-kube-api-access-kpq62\") pod \"certified-operators-pkwjd\" (UID: \"0995d7c1-230b-46a5-9136-c644fa9faf86\") " pod="openshift-marketplace/certified-operators-pkwjd" Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.250584 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0995d7c1-230b-46a5-9136-c644fa9faf86-catalog-content\") pod \"certified-operators-pkwjd\" (UID: \"0995d7c1-230b-46a5-9136-c644fa9faf86\") " pod="openshift-marketplace/certified-operators-pkwjd" Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.251079 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0995d7c1-230b-46a5-9136-c644fa9faf86-catalog-content\") pod \"certified-operators-pkwjd\" (UID: \"0995d7c1-230b-46a5-9136-c644fa9faf86\") " pod="openshift-marketplace/certified-operators-pkwjd" Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.251949 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0995d7c1-230b-46a5-9136-c644fa9faf86-utilities\") pod \"certified-operators-pkwjd\" (UID: \"0995d7c1-230b-46a5-9136-c644fa9faf86\") " pod="openshift-marketplace/certified-operators-pkwjd" Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.252190 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9dz8h"] Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.274092 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kpq62\" (UniqueName: \"kubernetes.io/projected/0995d7c1-230b-46a5-9136-c644fa9faf86-kube-api-access-kpq62\") pod \"certified-operators-pkwjd\" (UID: \"0995d7c1-230b-46a5-9136-c644fa9faf86\") " pod="openshift-marketplace/certified-operators-pkwjd" Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.342521 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-pkwjd" Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.351207 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-62z52\" (UniqueName: \"kubernetes.io/projected/9182b0cf-85af-4df8-81d6-5f1f407631ac-kube-api-access-62z52\") pod \"community-operators-9dz8h\" (UID: \"9182b0cf-85af-4df8-81d6-5f1f407631ac\") " pod="openshift-marketplace/community-operators-9dz8h" Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.351262 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9182b0cf-85af-4df8-81d6-5f1f407631ac-utilities\") pod \"community-operators-9dz8h\" (UID: \"9182b0cf-85af-4df8-81d6-5f1f407631ac\") " pod="openshift-marketplace/community-operators-9dz8h" Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.351310 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9182b0cf-85af-4df8-81d6-5f1f407631ac-catalog-content\") pod \"community-operators-9dz8h\" (UID: \"9182b0cf-85af-4df8-81d6-5f1f407631ac\") " pod="openshift-marketplace/community-operators-9dz8h" Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.452258 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9182b0cf-85af-4df8-81d6-5f1f407631ac-utilities\") pod \"community-operators-9dz8h\" (UID: \"9182b0cf-85af-4df8-81d6-5f1f407631ac\") " pod="openshift-marketplace/community-operators-9dz8h" Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.452329 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9182b0cf-85af-4df8-81d6-5f1f407631ac-catalog-content\") pod \"community-operators-9dz8h\" (UID: \"9182b0cf-85af-4df8-81d6-5f1f407631ac\") " pod="openshift-marketplace/community-operators-9dz8h" Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.452877 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9182b0cf-85af-4df8-81d6-5f1f407631ac-utilities\") pod \"community-operators-9dz8h\" (UID: \"9182b0cf-85af-4df8-81d6-5f1f407631ac\") " pod="openshift-marketplace/community-operators-9dz8h" Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.452904 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9182b0cf-85af-4df8-81d6-5f1f407631ac-catalog-content\") pod \"community-operators-9dz8h\" (UID: \"9182b0cf-85af-4df8-81d6-5f1f407631ac\") " pod="openshift-marketplace/community-operators-9dz8h" Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.452995 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-62z52\" (UniqueName: \"kubernetes.io/projected/9182b0cf-85af-4df8-81d6-5f1f407631ac-kube-api-access-62z52\") pod \"community-operators-9dz8h\" (UID: \"9182b0cf-85af-4df8-81d6-5f1f407631ac\") " pod="openshift-marketplace/community-operators-9dz8h" Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.484592 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-62z52\" (UniqueName: \"kubernetes.io/projected/9182b0cf-85af-4df8-81d6-5f1f407631ac-kube-api-access-62z52\") pod 
\"community-operators-9dz8h\" (UID: \"9182b0cf-85af-4df8-81d6-5f1f407631ac\") " pod="openshift-marketplace/community-operators-9dz8h" Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.570586 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9dz8h" Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.740049 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-pkwjd"] Dec 05 01:16:39 crc kubenswrapper[4665]: W1205 01:16:39.747279 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0995d7c1_230b_46a5_9136_c644fa9faf86.slice/crio-a5bdac93a73a352268efc38d4ad9ea1ad41c7fabec88cb364ed3ce2968c4e38d WatchSource:0}: Error finding container a5bdac93a73a352268efc38d4ad9ea1ad41c7fabec88cb364ed3ce2968c4e38d: Status 404 returned error can't find the container with id a5bdac93a73a352268efc38d4ad9ea1ad41c7fabec88cb364ed3ce2968c4e38d Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.850376 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ldv79" event={"ID":"7f4973ad-12ad-421c-b68f-9b47206f2e2e","Type":"ContainerStarted","Data":"697cc6253bae25dfb2e23246d97fe960a06a5c0ca12b6c85e2944cab5fbd14d0"} Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.851390 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pkwjd" event={"ID":"0995d7c1-230b-46a5-9136-c644fa9faf86","Type":"ContainerStarted","Data":"a5bdac93a73a352268efc38d4ad9ea1ad41c7fabec88cb364ed3ce2968c4e38d"} Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.854450 4665 generic.go:334] "Generic (PLEG): container finished" podID="3e6ec612-5d6b-4431-ba68-690a5c6c9c2a" containerID="182f893fc40822bf085555773e9e5c66fe5e207068a72b53610c27ebfc178df4" exitCode=0 Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.855780 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tlrk9" event={"ID":"3e6ec612-5d6b-4431-ba68-690a5c6c9c2a","Type":"ContainerDied","Data":"182f893fc40822bf085555773e9e5c66fe5e207068a72b53610c27ebfc178df4"} Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.871411 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ldv79" podStartSLOduration=2.423264164 podStartE2EDuration="3.871395401s" podCreationTimestamp="2025-12-05 01:16:36 +0000 UTC" firstStartedPulling="2025-12-05 01:16:37.834457539 +0000 UTC m=+373.173849838" lastFinishedPulling="2025-12-05 01:16:39.282588786 +0000 UTC m=+374.621981075" observedRunningTime="2025-12-05 01:16:39.863817363 +0000 UTC m=+375.203209672" watchObservedRunningTime="2025-12-05 01:16:39.871395401 +0000 UTC m=+375.210787700" Dec 05 01:16:39 crc kubenswrapper[4665]: I1205 01:16:39.961269 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9dz8h"] Dec 05 01:16:39 crc kubenswrapper[4665]: W1205 01:16:39.965095 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9182b0cf_85af_4df8_81d6_5f1f407631ac.slice/crio-8f0840ad30462b5deb2e56c9e584f572be7b412898b6e41c66f745e698e50823 WatchSource:0}: Error finding container 8f0840ad30462b5deb2e56c9e584f572be7b412898b6e41c66f745e698e50823: Status 404 returned error can't find the container with id 
8f0840ad30462b5deb2e56c9e584f572be7b412898b6e41c66f745e698e50823 Dec 05 01:16:40 crc kubenswrapper[4665]: I1205 01:16:40.861264 4665 generic.go:334] "Generic (PLEG): container finished" podID="0995d7c1-230b-46a5-9136-c644fa9faf86" containerID="35ab34280d95fa669e7e9e9440435daec02ead3269e3800626203ae1c0049003" exitCode=0 Dec 05 01:16:40 crc kubenswrapper[4665]: I1205 01:16:40.861342 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pkwjd" event={"ID":"0995d7c1-230b-46a5-9136-c644fa9faf86","Type":"ContainerDied","Data":"35ab34280d95fa669e7e9e9440435daec02ead3269e3800626203ae1c0049003"} Dec 05 01:16:40 crc kubenswrapper[4665]: I1205 01:16:40.874567 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tlrk9" event={"ID":"3e6ec612-5d6b-4431-ba68-690a5c6c9c2a","Type":"ContainerStarted","Data":"20e9bd6db31ebd2828a4a61391aa8cc064f44f00a907e10f4babb912624ea699"} Dec 05 01:16:40 crc kubenswrapper[4665]: I1205 01:16:40.875918 4665 generic.go:334] "Generic (PLEG): container finished" podID="9182b0cf-85af-4df8-81d6-5f1f407631ac" containerID="3104b5ecfcf1193fb18eb18186ac21fc991a49ff49e01f2267b35715850212fe" exitCode=0 Dec 05 01:16:40 crc kubenswrapper[4665]: I1205 01:16:40.877486 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9dz8h" event={"ID":"9182b0cf-85af-4df8-81d6-5f1f407631ac","Type":"ContainerDied","Data":"3104b5ecfcf1193fb18eb18186ac21fc991a49ff49e01f2267b35715850212fe"} Dec 05 01:16:40 crc kubenswrapper[4665]: I1205 01:16:40.877533 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9dz8h" event={"ID":"9182b0cf-85af-4df8-81d6-5f1f407631ac","Type":"ContainerStarted","Data":"8f0840ad30462b5deb2e56c9e584f572be7b412898b6e41c66f745e698e50823"} Dec 05 01:16:40 crc kubenswrapper[4665]: I1205 01:16:40.897751 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-tlrk9" podStartSLOduration=2.488132855 podStartE2EDuration="4.897734996s" podCreationTimestamp="2025-12-05 01:16:36 +0000 UTC" firstStartedPulling="2025-12-05 01:16:37.834968233 +0000 UTC m=+373.174360572" lastFinishedPulling="2025-12-05 01:16:40.244570414 +0000 UTC m=+375.583962713" observedRunningTime="2025-12-05 01:16:40.897643404 +0000 UTC m=+376.237035713" watchObservedRunningTime="2025-12-05 01:16:40.897734996 +0000 UTC m=+376.237127295" Dec 05 01:16:41 crc kubenswrapper[4665]: I1205 01:16:41.881755 4665 generic.go:334] "Generic (PLEG): container finished" podID="9182b0cf-85af-4df8-81d6-5f1f407631ac" containerID="5b11928c5c356ccaa4df8f6ee2128b8e0748742cde96ecd19c70094a898ff832" exitCode=0 Dec 05 01:16:41 crc kubenswrapper[4665]: I1205 01:16:41.881839 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9dz8h" event={"ID":"9182b0cf-85af-4df8-81d6-5f1f407631ac","Type":"ContainerDied","Data":"5b11928c5c356ccaa4df8f6ee2128b8e0748742cde96ecd19c70094a898ff832"} Dec 05 01:16:41 crc kubenswrapper[4665]: I1205 01:16:41.884039 4665 generic.go:334] "Generic (PLEG): container finished" podID="0995d7c1-230b-46a5-9136-c644fa9faf86" containerID="ff6544b62d47b69673d7d6ad806f5353b9234546ab0174613dac75d58eb94655" exitCode=0 Dec 05 01:16:41 crc kubenswrapper[4665]: I1205 01:16:41.884114 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pkwjd" 
event={"ID":"0995d7c1-230b-46a5-9136-c644fa9faf86","Type":"ContainerDied","Data":"ff6544b62d47b69673d7d6ad806f5353b9234546ab0174613dac75d58eb94655"} Dec 05 01:16:42 crc kubenswrapper[4665]: I1205 01:16:42.890045 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9dz8h" event={"ID":"9182b0cf-85af-4df8-81d6-5f1f407631ac","Type":"ContainerStarted","Data":"cfa218345e152735122739017da329405d1bc07293cc1830193089ea3f22cec8"} Dec 05 01:16:42 crc kubenswrapper[4665]: I1205 01:16:42.898376 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-pkwjd" event={"ID":"0995d7c1-230b-46a5-9136-c644fa9faf86","Type":"ContainerStarted","Data":"f8a451db02d92ffb565c0d6d59287a670b01f195ec5d941f9b143eb7729275e9"} Dec 05 01:16:42 crc kubenswrapper[4665]: I1205 01:16:42.908826 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9dz8h" podStartSLOduration=2.216777225 podStartE2EDuration="3.908811266s" podCreationTimestamp="2025-12-05 01:16:39 +0000 UTC" firstStartedPulling="2025-12-05 01:16:40.884915008 +0000 UTC m=+376.224307307" lastFinishedPulling="2025-12-05 01:16:42.576949049 +0000 UTC m=+377.916341348" observedRunningTime="2025-12-05 01:16:42.907481393 +0000 UTC m=+378.246873692" watchObservedRunningTime="2025-12-05 01:16:42.908811266 +0000 UTC m=+378.248203565" Dec 05 01:16:42 crc kubenswrapper[4665]: I1205 01:16:42.930227 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-pkwjd" podStartSLOduration=2.523058739 podStartE2EDuration="3.930209847s" podCreationTimestamp="2025-12-05 01:16:39 +0000 UTC" firstStartedPulling="2025-12-05 01:16:40.878830018 +0000 UTC m=+376.218222317" lastFinishedPulling="2025-12-05 01:16:42.285981116 +0000 UTC m=+377.625373425" observedRunningTime="2025-12-05 01:16:42.929008287 +0000 UTC m=+378.268400586" watchObservedRunningTime="2025-12-05 01:16:42.930209847 +0000 UTC m=+378.269602146" Dec 05 01:16:44 crc kubenswrapper[4665]: I1205 01:16:44.923270 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:16:44 crc kubenswrapper[4665]: I1205 01:16:44.923933 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:16:46 crc kubenswrapper[4665]: I1205 01:16:46.949901 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ldv79" Dec 05 01:16:46 crc kubenswrapper[4665]: I1205 01:16:46.950214 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ldv79" Dec 05 01:16:46 crc kubenswrapper[4665]: I1205 01:16:46.993182 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ldv79" Dec 05 01:16:47 crc kubenswrapper[4665]: I1205 01:16:47.144905 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-operators-tlrk9" Dec 05 01:16:47 crc kubenswrapper[4665]: I1205 01:16:47.144962 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tlrk9" Dec 05 01:16:47 crc kubenswrapper[4665]: I1205 01:16:47.181565 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tlrk9" Dec 05 01:16:47 crc kubenswrapper[4665]: I1205 01:16:47.962507 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tlrk9" Dec 05 01:16:47 crc kubenswrapper[4665]: I1205 01:16:47.971562 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ldv79" Dec 05 01:16:49 crc kubenswrapper[4665]: I1205 01:16:49.343394 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-pkwjd" Dec 05 01:16:49 crc kubenswrapper[4665]: I1205 01:16:49.343706 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-pkwjd" Dec 05 01:16:49 crc kubenswrapper[4665]: I1205 01:16:49.390808 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-pkwjd" Dec 05 01:16:49 crc kubenswrapper[4665]: I1205 01:16:49.573858 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9dz8h" Dec 05 01:16:49 crc kubenswrapper[4665]: I1205 01:16:49.573907 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9dz8h" Dec 05 01:16:49 crc kubenswrapper[4665]: I1205 01:16:49.609192 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9dz8h" Dec 05 01:16:49 crc kubenswrapper[4665]: I1205 01:16:49.976428 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9dz8h" Dec 05 01:16:49 crc kubenswrapper[4665]: I1205 01:16:49.988729 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-pkwjd" Dec 05 01:17:01 crc kubenswrapper[4665]: I1205 01:17:01.288351 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" podUID="bef0c5a0-6fef-4199-8782-99bc78b35374" containerName="registry" containerID="cri-o://00fa475f62cc93092b18ad1797b5910e224a7dabfefea707b747c452169442ab" gracePeriod=30 Dec 05 01:17:01 crc kubenswrapper[4665]: I1205 01:17:01.995355 4665 generic.go:334] "Generic (PLEG): container finished" podID="bef0c5a0-6fef-4199-8782-99bc78b35374" containerID="00fa475f62cc93092b18ad1797b5910e224a7dabfefea707b747c452169442ab" exitCode=0 Dec 05 01:17:01 crc kubenswrapper[4665]: I1205 01:17:01.995440 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" event={"ID":"bef0c5a0-6fef-4199-8782-99bc78b35374","Type":"ContainerDied","Data":"00fa475f62cc93092b18ad1797b5910e224a7dabfefea707b747c452169442ab"} Dec 05 01:17:02 crc kubenswrapper[4665]: I1205 01:17:02.155651 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:17:02 crc kubenswrapper[4665]: I1205 01:17:02.211576 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbvlz\" (UniqueName: \"kubernetes.io/projected/bef0c5a0-6fef-4199-8782-99bc78b35374-kube-api-access-jbvlz\") pod \"bef0c5a0-6fef-4199-8782-99bc78b35374\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " Dec 05 01:17:02 crc kubenswrapper[4665]: I1205 01:17:02.211757 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"bef0c5a0-6fef-4199-8782-99bc78b35374\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " Dec 05 01:17:02 crc kubenswrapper[4665]: I1205 01:17:02.211826 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bef0c5a0-6fef-4199-8782-99bc78b35374-registry-certificates\") pod \"bef0c5a0-6fef-4199-8782-99bc78b35374\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " Dec 05 01:17:02 crc kubenswrapper[4665]: I1205 01:17:02.211851 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bef0c5a0-6fef-4199-8782-99bc78b35374-trusted-ca\") pod \"bef0c5a0-6fef-4199-8782-99bc78b35374\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " Dec 05 01:17:02 crc kubenswrapper[4665]: I1205 01:17:02.211871 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bef0c5a0-6fef-4199-8782-99bc78b35374-installation-pull-secrets\") pod \"bef0c5a0-6fef-4199-8782-99bc78b35374\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " Dec 05 01:17:02 crc kubenswrapper[4665]: I1205 01:17:02.211932 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bef0c5a0-6fef-4199-8782-99bc78b35374-registry-tls\") pod \"bef0c5a0-6fef-4199-8782-99bc78b35374\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " Dec 05 01:17:02 crc kubenswrapper[4665]: I1205 01:17:02.212484 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bef0c5a0-6fef-4199-8782-99bc78b35374-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bef0c5a0-6fef-4199-8782-99bc78b35374" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:17:02 crc kubenswrapper[4665]: I1205 01:17:02.212541 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bef0c5a0-6fef-4199-8782-99bc78b35374-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "bef0c5a0-6fef-4199-8782-99bc78b35374" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374"). InnerVolumeSpecName "registry-certificates". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:17:02 crc kubenswrapper[4665]: I1205 01:17:02.213321 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bef0c5a0-6fef-4199-8782-99bc78b35374-bound-sa-token\") pod \"bef0c5a0-6fef-4199-8782-99bc78b35374\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " Dec 05 01:17:02 crc kubenswrapper[4665]: I1205 01:17:02.213422 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bef0c5a0-6fef-4199-8782-99bc78b35374-ca-trust-extracted\") pod \"bef0c5a0-6fef-4199-8782-99bc78b35374\" (UID: \"bef0c5a0-6fef-4199-8782-99bc78b35374\") " Dec 05 01:17:02 crc kubenswrapper[4665]: I1205 01:17:02.213989 4665 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/bef0c5a0-6fef-4199-8782-99bc78b35374-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 05 01:17:02 crc kubenswrapper[4665]: I1205 01:17:02.214033 4665 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bef0c5a0-6fef-4199-8782-99bc78b35374-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 01:17:02 crc kubenswrapper[4665]: I1205 01:17:02.218570 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bef0c5a0-6fef-4199-8782-99bc78b35374-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "bef0c5a0-6fef-4199-8782-99bc78b35374" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:17:02 crc kubenswrapper[4665]: I1205 01:17:02.218638 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bef0c5a0-6fef-4199-8782-99bc78b35374-kube-api-access-jbvlz" (OuterVolumeSpecName: "kube-api-access-jbvlz") pod "bef0c5a0-6fef-4199-8782-99bc78b35374" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374"). InnerVolumeSpecName "kube-api-access-jbvlz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:17:02 crc kubenswrapper[4665]: I1205 01:17:02.218697 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bef0c5a0-6fef-4199-8782-99bc78b35374-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bef0c5a0-6fef-4199-8782-99bc78b35374" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:17:02 crc kubenswrapper[4665]: I1205 01:17:02.220820 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bef0c5a0-6fef-4199-8782-99bc78b35374-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "bef0c5a0-6fef-4199-8782-99bc78b35374" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:17:02 crc kubenswrapper[4665]: I1205 01:17:02.221230 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "bef0c5a0-6fef-4199-8782-99bc78b35374" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 01:17:02 crc kubenswrapper[4665]: I1205 01:17:02.228955 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bef0c5a0-6fef-4199-8782-99bc78b35374-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "bef0c5a0-6fef-4199-8782-99bc78b35374" (UID: "bef0c5a0-6fef-4199-8782-99bc78b35374"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:17:02 crc kubenswrapper[4665]: I1205 01:17:02.314865 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbvlz\" (UniqueName: \"kubernetes.io/projected/bef0c5a0-6fef-4199-8782-99bc78b35374-kube-api-access-jbvlz\") on node \"crc\" DevicePath \"\"" Dec 05 01:17:02 crc kubenswrapper[4665]: I1205 01:17:02.314903 4665 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/bef0c5a0-6fef-4199-8782-99bc78b35374-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 05 01:17:02 crc kubenswrapper[4665]: I1205 01:17:02.314913 4665 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/bef0c5a0-6fef-4199-8782-99bc78b35374-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 05 01:17:02 crc kubenswrapper[4665]: I1205 01:17:02.314922 4665 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bef0c5a0-6fef-4199-8782-99bc78b35374-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 01:17:02 crc kubenswrapper[4665]: I1205 01:17:02.314930 4665 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/bef0c5a0-6fef-4199-8782-99bc78b35374-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 05 01:17:03 crc kubenswrapper[4665]: I1205 01:17:03.002991 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" event={"ID":"bef0c5a0-6fef-4199-8782-99bc78b35374","Type":"ContainerDied","Data":"87fd3ea6d516b42da7703c27d1a32b6eaf99fb5087debd5dacf473163766dde7"} Dec 05 01:17:03 crc kubenswrapper[4665]: I1205 01:17:03.003557 4665 scope.go:117] "RemoveContainer" containerID="00fa475f62cc93092b18ad1797b5910e224a7dabfefea707b747c452169442ab" Dec 05 01:17:03 crc kubenswrapper[4665]: I1205 01:17:03.003018 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-xbw2d" Dec 05 01:17:03 crc kubenswrapper[4665]: I1205 01:17:03.029599 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xbw2d"] Dec 05 01:17:03 crc kubenswrapper[4665]: I1205 01:17:03.036160 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-xbw2d"] Dec 05 01:17:04 crc kubenswrapper[4665]: I1205 01:17:04.901359 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bef0c5a0-6fef-4199-8782-99bc78b35374" path="/var/lib/kubelet/pods/bef0c5a0-6fef-4199-8782-99bc78b35374/volumes" Dec 05 01:17:14 crc kubenswrapper[4665]: I1205 01:17:14.922924 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:17:14 crc kubenswrapper[4665]: I1205 01:17:14.923498 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:17:14 crc kubenswrapper[4665]: I1205 01:17:14.923535 4665 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 01:17:14 crc kubenswrapper[4665]: I1205 01:17:14.923928 4665 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d5bc07be9bd709e1be646d373aeef4fffe2def85b634b1bda0f9a262e2181517"} pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 01:17:14 crc kubenswrapper[4665]: I1205 01:17:14.924008 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" containerID="cri-o://d5bc07be9bd709e1be646d373aeef4fffe2def85b634b1bda0f9a262e2181517" gracePeriod=600 Dec 05 01:17:15 crc kubenswrapper[4665]: I1205 01:17:15.070734 4665 generic.go:334] "Generic (PLEG): container finished" podID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerID="d5bc07be9bd709e1be646d373aeef4fffe2def85b634b1bda0f9a262e2181517" exitCode=0 Dec 05 01:17:15 crc kubenswrapper[4665]: I1205 01:17:15.070769 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerDied","Data":"d5bc07be9bd709e1be646d373aeef4fffe2def85b634b1bda0f9a262e2181517"} Dec 05 01:17:15 crc kubenswrapper[4665]: I1205 01:17:15.070800 4665 scope.go:117] "RemoveContainer" containerID="8f8207b98790ef2f0b95c0729b8954d7ecce66214362018aa4fd62a26c2e76a2" Dec 05 01:17:16 crc kubenswrapper[4665]: I1205 01:17:16.077638 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" 
event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerStarted","Data":"2cde39d7d4a0201ce1c07c9117b9e415325e344e62e60943726ee18c016a15d3"} Dec 05 01:19:44 crc kubenswrapper[4665]: I1205 01:19:44.922850 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:19:44 crc kubenswrapper[4665]: I1205 01:19:44.923662 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:20:14 crc kubenswrapper[4665]: I1205 01:20:14.922081 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:20:14 crc kubenswrapper[4665]: I1205 01:20:14.922740 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:20:44 crc kubenswrapper[4665]: I1205 01:20:44.922158 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:20:44 crc kubenswrapper[4665]: I1205 01:20:44.923067 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:20:44 crc kubenswrapper[4665]: I1205 01:20:44.923148 4665 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 01:20:44 crc kubenswrapper[4665]: I1205 01:20:44.924060 4665 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2cde39d7d4a0201ce1c07c9117b9e415325e344e62e60943726ee18c016a15d3"} pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 01:20:44 crc kubenswrapper[4665]: I1205 01:20:44.924156 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" containerID="cri-o://2cde39d7d4a0201ce1c07c9117b9e415325e344e62e60943726ee18c016a15d3" gracePeriod=600 Dec 05 01:20:45 crc kubenswrapper[4665]: I1205 01:20:45.285396 4665 generic.go:334] "Generic (PLEG): container finished" 
podID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerID="2cde39d7d4a0201ce1c07c9117b9e415325e344e62e60943726ee18c016a15d3" exitCode=0 Dec 05 01:20:45 crc kubenswrapper[4665]: I1205 01:20:45.285462 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerDied","Data":"2cde39d7d4a0201ce1c07c9117b9e415325e344e62e60943726ee18c016a15d3"} Dec 05 01:20:45 crc kubenswrapper[4665]: I1205 01:20:45.285795 4665 scope.go:117] "RemoveContainer" containerID="d5bc07be9bd709e1be646d373aeef4fffe2def85b634b1bda0f9a262e2181517" Dec 05 01:20:46 crc kubenswrapper[4665]: I1205 01:20:46.292107 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerStarted","Data":"61048277fbfd2d8678771fdadef582a23356aff6798430a3f97d91a5d469245e"} Dec 05 01:23:07 crc kubenswrapper[4665]: I1205 01:23:07.409466 4665 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Dec 05 01:23:14 crc kubenswrapper[4665]: I1205 01:23:14.922113 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:23:14 crc kubenswrapper[4665]: I1205 01:23:14.922971 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:23:44 crc kubenswrapper[4665]: I1205 01:23:44.922562 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:23:44 crc kubenswrapper[4665]: I1205 01:23:44.923112 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.301452 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-pv2mm"] Dec 05 01:23:57 crc kubenswrapper[4665]: E1205 01:23:57.302137 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bef0c5a0-6fef-4199-8782-99bc78b35374" containerName="registry" Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.302149 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="bef0c5a0-6fef-4199-8782-99bc78b35374" containerName="registry" Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.302235 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="bef0c5a0-6fef-4199-8782-99bc78b35374" containerName="registry" Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.302603 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-pv2mm" Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.308018 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt" Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.308455 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt" Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.309593 4665 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-hm6lg" Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.319841 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-2czfg"] Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.320860 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-2czfg" Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.323608 4665 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-czbmw" Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.329350 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-pv2mm"] Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.340855 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-2czfg"] Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.345886 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-hdlpm"] Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.346907 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-hdlpm" Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.348367 4665 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-r28lm" Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.358632 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-hdlpm"] Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.390244 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d7jhq\" (UniqueName: \"kubernetes.io/projected/5f586972-9546-4a85-a442-590c70b38de3-kube-api-access-d7jhq\") pod \"cert-manager-5b446d88c5-hdlpm\" (UID: \"5f586972-9546-4a85-a442-590c70b38de3\") " pod="cert-manager/cert-manager-5b446d88c5-hdlpm" Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.390289 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rk4l5\" (UniqueName: \"kubernetes.io/projected/7b226a2a-8fb6-4694-a1a7-9a86e3d222e9-kube-api-access-rk4l5\") pod \"cert-manager-cainjector-7f985d654d-pv2mm\" (UID: \"7b226a2a-8fb6-4694-a1a7-9a86e3d222e9\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-pv2mm" Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.390362 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nfq2m\" (UniqueName: \"kubernetes.io/projected/7af985e3-148d-4974-b2ea-d9679063234a-kube-api-access-nfq2m\") pod \"cert-manager-webhook-5655c58dd6-2czfg\" (UID: \"7af985e3-148d-4974-b2ea-d9679063234a\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-2czfg" Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.491458 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nfq2m\" (UniqueName: \"kubernetes.io/projected/7af985e3-148d-4974-b2ea-d9679063234a-kube-api-access-nfq2m\") pod \"cert-manager-webhook-5655c58dd6-2czfg\" (UID: \"7af985e3-148d-4974-b2ea-d9679063234a\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-2czfg" Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.491763 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d7jhq\" (UniqueName: \"kubernetes.io/projected/5f586972-9546-4a85-a442-590c70b38de3-kube-api-access-d7jhq\") pod \"cert-manager-5b446d88c5-hdlpm\" (UID: \"5f586972-9546-4a85-a442-590c70b38de3\") " pod="cert-manager/cert-manager-5b446d88c5-hdlpm" Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.491887 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rk4l5\" (UniqueName: \"kubernetes.io/projected/7b226a2a-8fb6-4694-a1a7-9a86e3d222e9-kube-api-access-rk4l5\") pod \"cert-manager-cainjector-7f985d654d-pv2mm\" (UID: \"7b226a2a-8fb6-4694-a1a7-9a86e3d222e9\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-pv2mm" Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.510828 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d7jhq\" (UniqueName: \"kubernetes.io/projected/5f586972-9546-4a85-a442-590c70b38de3-kube-api-access-d7jhq\") pod \"cert-manager-5b446d88c5-hdlpm\" (UID: \"5f586972-9546-4a85-a442-590c70b38de3\") " pod="cert-manager/cert-manager-5b446d88c5-hdlpm" Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.514989 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-rk4l5\" (UniqueName: \"kubernetes.io/projected/7b226a2a-8fb6-4694-a1a7-9a86e3d222e9-kube-api-access-rk4l5\") pod \"cert-manager-cainjector-7f985d654d-pv2mm\" (UID: \"7b226a2a-8fb6-4694-a1a7-9a86e3d222e9\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-pv2mm" Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.515404 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nfq2m\" (UniqueName: \"kubernetes.io/projected/7af985e3-148d-4974-b2ea-d9679063234a-kube-api-access-nfq2m\") pod \"cert-manager-webhook-5655c58dd6-2czfg\" (UID: \"7af985e3-148d-4974-b2ea-d9679063234a\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-2czfg" Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.618568 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-pv2mm" Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.637775 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-2czfg" Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.665586 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-hdlpm" Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.884734 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-pv2mm"] Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.899929 4665 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.928430 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-2czfg"] Dec 05 01:23:57 crc kubenswrapper[4665]: W1205 01:23:57.941264 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7af985e3_148d_4974_b2ea_d9679063234a.slice/crio-c5a75ca2b4a4778da9545d93f1196e985662d2a10cc7e88c2555b3b1840981d2 WatchSource:0}: Error finding container c5a75ca2b4a4778da9545d93f1196e985662d2a10cc7e88c2555b3b1840981d2: Status 404 returned error can't find the container with id c5a75ca2b4a4778da9545d93f1196e985662d2a10cc7e88c2555b3b1840981d2 Dec 05 01:23:57 crc kubenswrapper[4665]: I1205 01:23:57.967571 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-hdlpm"] Dec 05 01:23:57 crc kubenswrapper[4665]: W1205 01:23:57.974200 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f586972_9546_4a85_a442_590c70b38de3.slice/crio-cb12a58c13ff6c3de68f6911493c918d10723be4d14d40120373db8849ce6a21 WatchSource:0}: Error finding container cb12a58c13ff6c3de68f6911493c918d10723be4d14d40120373db8849ce6a21: Status 404 returned error can't find the container with id cb12a58c13ff6c3de68f6911493c918d10723be4d14d40120373db8849ce6a21 Dec 05 01:23:58 crc kubenswrapper[4665]: I1205 01:23:58.368482 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-hdlpm" event={"ID":"5f586972-9546-4a85-a442-590c70b38de3","Type":"ContainerStarted","Data":"cb12a58c13ff6c3de68f6911493c918d10723be4d14d40120373db8849ce6a21"} Dec 05 01:23:58 crc kubenswrapper[4665]: I1205 01:23:58.369802 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-pv2mm" 
event={"ID":"7b226a2a-8fb6-4694-a1a7-9a86e3d222e9","Type":"ContainerStarted","Data":"ba9c1ac884f984fb2768f7ce82133d0a5eb45fbdae8ed5337dd53335ddedc11f"} Dec 05 01:23:58 crc kubenswrapper[4665]: I1205 01:23:58.370817 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-2czfg" event={"ID":"7af985e3-148d-4974-b2ea-d9679063234a","Type":"ContainerStarted","Data":"c5a75ca2b4a4778da9545d93f1196e985662d2a10cc7e88c2555b3b1840981d2"} Dec 05 01:24:01 crc kubenswrapper[4665]: I1205 01:24:01.399283 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-2czfg" event={"ID":"7af985e3-148d-4974-b2ea-d9679063234a","Type":"ContainerStarted","Data":"4874e6437f7aa93375f98da5ffea324857b698ba530e45f0ec7aef80919fc5f9"} Dec 05 01:24:01 crc kubenswrapper[4665]: I1205 01:24:01.399888 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-2czfg" Dec 05 01:24:01 crc kubenswrapper[4665]: I1205 01:24:01.402145 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-hdlpm" event={"ID":"5f586972-9546-4a85-a442-590c70b38de3","Type":"ContainerStarted","Data":"50f9d270081f5fa69776bd5acc5b422cf467da32336617f9d26dfec7d7f49429"} Dec 05 01:24:01 crc kubenswrapper[4665]: I1205 01:24:01.404094 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-pv2mm" event={"ID":"7b226a2a-8fb6-4694-a1a7-9a86e3d222e9","Type":"ContainerStarted","Data":"00610781d8a91e78a7a27ee2a39da7365b319d821ae9bfdf14c089e368c96933"} Dec 05 01:24:01 crc kubenswrapper[4665]: I1205 01:24:01.429734 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-2czfg" podStartSLOduration=1.431359937 podStartE2EDuration="4.429713158s" podCreationTimestamp="2025-12-05 01:23:57 +0000 UTC" firstStartedPulling="2025-12-05 01:23:57.942723535 +0000 UTC m=+813.282115834" lastFinishedPulling="2025-12-05 01:24:00.941076756 +0000 UTC m=+816.280469055" observedRunningTime="2025-12-05 01:24:01.416214746 +0000 UTC m=+816.755607055" watchObservedRunningTime="2025-12-05 01:24:01.429713158 +0000 UTC m=+816.769105457" Dec 05 01:24:01 crc kubenswrapper[4665]: I1205 01:24:01.430385 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-pv2mm" podStartSLOduration=1.38962129 podStartE2EDuration="4.430375904s" podCreationTimestamp="2025-12-05 01:23:57 +0000 UTC" firstStartedPulling="2025-12-05 01:23:57.899726028 +0000 UTC m=+813.239118327" lastFinishedPulling="2025-12-05 01:24:00.940480642 +0000 UTC m=+816.279872941" observedRunningTime="2025-12-05 01:24:01.429553014 +0000 UTC m=+816.768945323" watchObservedRunningTime="2025-12-05 01:24:01.430375904 +0000 UTC m=+816.769768203" Dec 05 01:24:01 crc kubenswrapper[4665]: I1205 01:24:01.451016 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-hdlpm" podStartSLOduration=1.480899121 podStartE2EDuration="4.451001167s" podCreationTimestamp="2025-12-05 01:23:57 +0000 UTC" firstStartedPulling="2025-12-05 01:23:57.976579844 +0000 UTC m=+813.315972143" lastFinishedPulling="2025-12-05 01:24:00.94668189 +0000 UTC m=+816.286074189" observedRunningTime="2025-12-05 01:24:01.447922383 +0000 UTC m=+816.787314682" watchObservedRunningTime="2025-12-05 01:24:01.451001167 +0000 UTC m=+816.790393466" Dec 05 01:24:07 
crc kubenswrapper[4665]: I1205 01:24:07.641837 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-2czfg" Dec 05 01:24:07 crc kubenswrapper[4665]: I1205 01:24:07.760233 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2bmn9"] Dec 05 01:24:07 crc kubenswrapper[4665]: I1205 01:24:07.760681 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="ovn-controller" containerID="cri-o://c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132" gracePeriod=30 Dec 05 01:24:07 crc kubenswrapper[4665]: I1205 01:24:07.760743 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="nbdb" containerID="cri-o://79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336" gracePeriod=30 Dec 05 01:24:07 crc kubenswrapper[4665]: I1205 01:24:07.760771 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993" gracePeriod=30 Dec 05 01:24:07 crc kubenswrapper[4665]: I1205 01:24:07.760788 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="northd" containerID="cri-o://41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f" gracePeriod=30 Dec 05 01:24:07 crc kubenswrapper[4665]: I1205 01:24:07.760818 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="kube-rbac-proxy-node" containerID="cri-o://851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6" gracePeriod=30 Dec 05 01:24:07 crc kubenswrapper[4665]: I1205 01:24:07.760875 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="ovn-acl-logging" containerID="cri-o://fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd" gracePeriod=30 Dec 05 01:24:07 crc kubenswrapper[4665]: I1205 01:24:07.760931 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="sbdb" containerID="cri-o://f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670" gracePeriod=30 Dec 05 01:24:07 crc kubenswrapper[4665]: I1205 01:24:07.795255 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="ovnkube-controller" containerID="cri-o://0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8" gracePeriod=30 Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.119335 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bmn9_76af84f2-4935-4e7f-8fc6-b51adcfeebc4/ovnkube-controller/3.log" Dec 05 01:24:08 crc kubenswrapper[4665]: 
I1205 01:24:08.123285 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bmn9_76af84f2-4935-4e7f-8fc6-b51adcfeebc4/ovn-acl-logging/0.log" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.124496 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bmn9_76af84f2-4935-4e7f-8fc6-b51adcfeebc4/ovn-controller/0.log" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.125334 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.184414 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-g727j"] Dec 05 01:24:08 crc kubenswrapper[4665]: E1205 01:24:08.184889 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="ovnkube-controller" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.184905 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="ovnkube-controller" Dec 05 01:24:08 crc kubenswrapper[4665]: E1205 01:24:08.184914 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="ovn-controller" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.184920 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="ovn-controller" Dec 05 01:24:08 crc kubenswrapper[4665]: E1205 01:24:08.184929 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="ovnkube-controller" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.184936 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="ovnkube-controller" Dec 05 01:24:08 crc kubenswrapper[4665]: E1205 01:24:08.184945 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="sbdb" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.184950 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="sbdb" Dec 05 01:24:08 crc kubenswrapper[4665]: E1205 01:24:08.184959 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="nbdb" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.184964 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="nbdb" Dec 05 01:24:08 crc kubenswrapper[4665]: E1205 01:24:08.184973 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="ovn-acl-logging" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.184978 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="ovn-acl-logging" Dec 05 01:24:08 crc kubenswrapper[4665]: E1205 01:24:08.184986 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.184992 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="kube-rbac-proxy-ovn-metrics" Dec 
05 01:24:08 crc kubenswrapper[4665]: E1205 01:24:08.185002 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="kubecfg-setup" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.185009 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="kubecfg-setup" Dec 05 01:24:08 crc kubenswrapper[4665]: E1205 01:24:08.185017 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="northd" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.185024 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="northd" Dec 05 01:24:08 crc kubenswrapper[4665]: E1205 01:24:08.185036 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="ovnkube-controller" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.185042 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="ovnkube-controller" Dec 05 01:24:08 crc kubenswrapper[4665]: E1205 01:24:08.185049 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="ovnkube-controller" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.185054 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="ovnkube-controller" Dec 05 01:24:08 crc kubenswrapper[4665]: E1205 01:24:08.185064 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="kube-rbac-proxy-node" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.185069 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="kube-rbac-proxy-node" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.185151 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="ovnkube-controller" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.185160 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="ovnkube-controller" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.185170 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="ovnkube-controller" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.185179 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="kube-rbac-proxy-node" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.185187 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="sbdb" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.185196 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="ovn-acl-logging" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.185203 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="nbdb" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.185212 4665 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="ovn-controller" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.185219 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.185228 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="northd" Dec 05 01:24:08 crc kubenswrapper[4665]: E1205 01:24:08.185322 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="ovnkube-controller" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.185329 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="ovnkube-controller" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.185420 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="ovnkube-controller" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.185428 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerName="ovnkube-controller" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.187351 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.324830 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-cni-bin\") pod \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.324872 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-run-systemd\") pod \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.324895 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-slash\") pod \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.324919 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-kubelet\") pod \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.324959 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bklrv\" (UniqueName: \"kubernetes.io/projected/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-kube-api-access-bklrv\") pod \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.324981 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-log-socket\") pod \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\" (UID: 
\"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325007 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-run-netns\") pod \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325075 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-run-ovn\") pod \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325070 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "76af84f2-4935-4e7f-8fc6-b51adcfeebc4" (UID: "76af84f2-4935-4e7f-8fc6-b51adcfeebc4"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325107 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-var-lib-openvswitch\") pod \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325130 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-var-lib-cni-networks-ovn-kubernetes\") pod \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325159 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "76af84f2-4935-4e7f-8fc6-b51adcfeebc4" (UID: "76af84f2-4935-4e7f-8fc6-b51adcfeebc4"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325167 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-ovnkube-config\") pod \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325237 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-etc-openvswitch\") pod \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325287 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-ovnkube-script-lib\") pod \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325492 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-log-socket" (OuterVolumeSpecName: "log-socket") pod "76af84f2-4935-4e7f-8fc6-b51adcfeebc4" (UID: "76af84f2-4935-4e7f-8fc6-b51adcfeebc4"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325547 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-run-ovn-kubernetes\") pod \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325638 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-cni-netd\") pod \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325596 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "76af84f2-4935-4e7f-8fc6-b51adcfeebc4" (UID: "76af84f2-4935-4e7f-8fc6-b51adcfeebc4"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325736 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "76af84f2-4935-4e7f-8fc6-b51adcfeebc4" (UID: "76af84f2-4935-4e7f-8fc6-b51adcfeebc4"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325724 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-systemd-units\") pod \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325629 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "76af84f2-4935-4e7f-8fc6-b51adcfeebc4" (UID: "76af84f2-4935-4e7f-8fc6-b51adcfeebc4"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325628 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-slash" (OuterVolumeSpecName: "host-slash") pod "76af84f2-4935-4e7f-8fc6-b51adcfeebc4" (UID: "76af84f2-4935-4e7f-8fc6-b51adcfeebc4"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325672 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "76af84f2-4935-4e7f-8fc6-b51adcfeebc4" (UID: "76af84f2-4935-4e7f-8fc6-b51adcfeebc4"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325694 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "76af84f2-4935-4e7f-8fc6-b51adcfeebc4" (UID: "76af84f2-4935-4e7f-8fc6-b51adcfeebc4"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325791 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-run-openvswitch\") pod \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325852 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-ovn-node-metrics-cert\") pod \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325697 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "76af84f2-4935-4e7f-8fc6-b51adcfeebc4" (UID: "76af84f2-4935-4e7f-8fc6-b51adcfeebc4"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325703 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "76af84f2-4935-4e7f-8fc6-b51adcfeebc4" (UID: "76af84f2-4935-4e7f-8fc6-b51adcfeebc4"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325818 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "76af84f2-4935-4e7f-8fc6-b51adcfeebc4" (UID: "76af84f2-4935-4e7f-8fc6-b51adcfeebc4"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325943 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-env-overrides\") pod \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.325986 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-node-log\") pod \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\" (UID: \"76af84f2-4935-4e7f-8fc6-b51adcfeebc4\") " Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.326167 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "76af84f2-4935-4e7f-8fc6-b51adcfeebc4" (UID: "76af84f2-4935-4e7f-8fc6-b51adcfeebc4"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.326236 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-etc-openvswitch\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.326248 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-node-log" (OuterVolumeSpecName: "node-log") pod "76af84f2-4935-4e7f-8fc6-b51adcfeebc4" (UID: "76af84f2-4935-4e7f-8fc6-b51adcfeebc4"). InnerVolumeSpecName "node-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.326333 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-var-lib-openvswitch\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.326463 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-649fp\" (UniqueName: \"kubernetes.io/projected/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-kube-api-access-649fp\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.326562 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-node-log\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.326655 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-host-slash\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.326475 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "76af84f2-4935-4e7f-8fc6-b51adcfeebc4" (UID: "76af84f2-4935-4e7f-8fc6-b51adcfeebc4"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.326558 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "76af84f2-4935-4e7f-8fc6-b51adcfeebc4" (UID: "76af84f2-4935-4e7f-8fc6-b51adcfeebc4"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.326553 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "76af84f2-4935-4e7f-8fc6-b51adcfeebc4" (UID: "76af84f2-4935-4e7f-8fc6-b51adcfeebc4"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.326780 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-host-cni-bin\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.326947 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-systemd-units\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.327018 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-run-ovn\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.327084 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-ovn-node-metrics-cert\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.327148 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-ovnkube-script-lib\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.327227 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-env-overrides\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.327317 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-host-run-ovn-kubernetes\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.327408 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-host-kubelet\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.327486 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: 
\"kubernetes.io/configmap/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-ovnkube-config\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.327555 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-log-socket\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.327698 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-host-run-netns\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.327807 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-host-cni-netd\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.327937 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.328006 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-run-systemd\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.328073 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-run-openvswitch\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.328170 4665 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-node-log\") on node \"crc\" DevicePath \"\"" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.328228 4665 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.328307 4665 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-slash\") on node \"crc\" DevicePath \"\"" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.328419 4665 reconciler_common.go:293] "Volume detached 
for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.328479 4665 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-log-socket\") on node \"crc\" DevicePath \"\"" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.328533 4665 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.328588 4665 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.328636 4665 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.328684 4665 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.328761 4665 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.328813 4665 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.328852 4665 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.328878 4665 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.328891 4665 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.328902 4665 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.328914 4665 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.328926 4665 reconciler_common.go:293] "Volume detached for 
volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.332234 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-kube-api-access-bklrv" (OuterVolumeSpecName: "kube-api-access-bklrv") pod "76af84f2-4935-4e7f-8fc6-b51adcfeebc4" (UID: "76af84f2-4935-4e7f-8fc6-b51adcfeebc4"). InnerVolumeSpecName "kube-api-access-bklrv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.332762 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "76af84f2-4935-4e7f-8fc6-b51adcfeebc4" (UID: "76af84f2-4935-4e7f-8fc6-b51adcfeebc4"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.347931 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "76af84f2-4935-4e7f-8fc6-b51adcfeebc4" (UID: "76af84f2-4935-4e7f-8fc6-b51adcfeebc4"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.430381 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-etc-openvswitch\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.430516 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-var-lib-openvswitch\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.430551 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-649fp\" (UniqueName: \"kubernetes.io/projected/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-kube-api-access-649fp\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.430634 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-node-log\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.430713 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-host-slash\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.430786 4665 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-host-cni-bin\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.430852 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-systemd-units\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.430893 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-run-ovn\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.430964 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-ovn-node-metrics-cert\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431018 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-node-log\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.430997 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-ovnkube-script-lib\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431082 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-var-lib-openvswitch\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431185 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-env-overrides\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431226 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-host-run-ovn-kubernetes\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431272 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: 
\"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-host-kubelet\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431308 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-ovnkube-config\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431341 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-log-socket\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431375 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-host-run-netns\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431392 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-host-cni-netd\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431433 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431488 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-host-slash\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431495 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-run-systemd\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431516 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-run-systemd\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431541 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-log-socket\") pod \"ovnkube-node-g727j\" 
(UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431561 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-host-run-netns\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431559 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-run-openvswitch\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431457 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-systemd-units\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431596 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-run-openvswitch\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431606 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-host-cni-bin\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431611 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431614 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-etc-openvswitch\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431639 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-run-ovn\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431649 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-host-cni-netd\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc 
kubenswrapper[4665]: I1205 01:24:08.431687 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-host-run-ovn-kubernetes\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431717 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-host-kubelet\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431754 4665 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431766 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bklrv\" (UniqueName: \"kubernetes.io/projected/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-kube-api-access-bklrv\") on node \"crc\" DevicePath \"\"" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.431779 4665 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/76af84f2-4935-4e7f-8fc6-b51adcfeebc4-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.432146 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-env-overrides\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.432252 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-ovnkube-config\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.432340 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-ovnkube-script-lib\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.434532 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-ovn-node-metrics-cert\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.446169 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j22m9_be34b4a6-0156-4e21-bae6-12af18583b0d/kube-multus/2.log" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.446661 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j22m9_be34b4a6-0156-4e21-bae6-12af18583b0d/kube-multus/1.log" Dec 05 01:24:08 crc 
kubenswrapper[4665]: I1205 01:24:08.446705 4665 generic.go:334] "Generic (PLEG): container finished" podID="be34b4a6-0156-4e21-bae6-12af18583b0d" containerID="95adcd9946fd2ca659a80fe447f9f72ba2ec042197f5e19940a5717bb7a4a837" exitCode=2 Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.446788 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j22m9" event={"ID":"be34b4a6-0156-4e21-bae6-12af18583b0d","Type":"ContainerDied","Data":"95adcd9946fd2ca659a80fe447f9f72ba2ec042197f5e19940a5717bb7a4a837"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.446823 4665 scope.go:117] "RemoveContainer" containerID="064bac400c3e094a4e3864b57ff11e28c7c0b35c66a359a07995c9d42ba7c068" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.448940 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-649fp\" (UniqueName: \"kubernetes.io/projected/640e5c29-cc8b-4d04-87dc-805aa4e2ae3d-kube-api-access-649fp\") pod \"ovnkube-node-g727j\" (UID: \"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d\") " pod="openshift-ovn-kubernetes/ovnkube-node-g727j" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.449176 4665 scope.go:117] "RemoveContainer" containerID="95adcd9946fd2ca659a80fe447f9f72ba2ec042197f5e19940a5717bb7a4a837" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.450146 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bmn9_76af84f2-4935-4e7f-8fc6-b51adcfeebc4/ovnkube-controller/3.log" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.453931 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bmn9_76af84f2-4935-4e7f-8fc6-b51adcfeebc4/ovn-acl-logging/0.log" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.454674 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-2bmn9_76af84f2-4935-4e7f-8fc6-b51adcfeebc4/ovn-controller/0.log" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458649 4665 generic.go:334] "Generic (PLEG): container finished" podID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerID="0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8" exitCode=0 Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458676 4665 generic.go:334] "Generic (PLEG): container finished" podID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerID="f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670" exitCode=0 Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458684 4665 generic.go:334] "Generic (PLEG): container finished" podID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerID="79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336" exitCode=0 Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458691 4665 generic.go:334] "Generic (PLEG): container finished" podID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerID="41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f" exitCode=0 Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458698 4665 generic.go:334] "Generic (PLEG): container finished" podID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerID="da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993" exitCode=0 Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458706 4665 generic.go:334] "Generic (PLEG): container finished" podID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerID="851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6" exitCode=0 Dec 05 01:24:08 crc 
kubenswrapper[4665]: I1205 01:24:08.458715 4665 generic.go:334] "Generic (PLEG): container finished" podID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerID="fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd" exitCode=143 Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458722 4665 generic.go:334] "Generic (PLEG): container finished" podID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" containerID="c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132" exitCode=143 Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458740 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerDied","Data":"0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458765 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerDied","Data":"f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458776 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerDied","Data":"79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458785 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerDied","Data":"41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458795 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerDied","Data":"da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458805 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerDied","Data":"851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458816 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458825 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458831 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458836 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458840 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458845 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458850 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458855 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458860 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458865 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458872 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerDied","Data":"fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458880 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458887 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458891 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458896 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458901 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458907 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458911 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458916 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458921 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458926 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458932 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerDied","Data":"c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458939 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458947 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458952 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458957 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458963 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458968 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458973 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458978 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458983 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458987 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.458994 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" event={"ID":"76af84f2-4935-4e7f-8fc6-b51adcfeebc4","Type":"ContainerDied","Data":"7dfc40eeea11eac4d19ef0cef9577075dab1a931800b72d63d2d8edd732d9ff5"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.459001 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.459007 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.459012 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.459016 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.459021 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.459026 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.459031 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.459036 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.459040 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.459045 4665 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f"} Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.459140 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-2bmn9" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.472490 4665 scope.go:117] "RemoveContainer" containerID="0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8" Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.500482 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-g727j"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.504433 4665 scope.go:117] "RemoveContainer" containerID="56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.507407 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2bmn9"]
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.512809 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-2bmn9"]
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.540586 4665 scope.go:117] "RemoveContainer" containerID="f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.556985 4665 scope.go:117] "RemoveContainer" containerID="79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.570490 4665 scope.go:117] "RemoveContainer" containerID="41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.588814 4665 scope.go:117] "RemoveContainer" containerID="da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.606522 4665 scope.go:117] "RemoveContainer" containerID="851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.623778 4665 scope.go:117] "RemoveContainer" containerID="fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.638034 4665 scope.go:117] "RemoveContainer" containerID="c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.704418 4665 scope.go:117] "RemoveContainer" containerID="feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.726951 4665 scope.go:117] "RemoveContainer" containerID="0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8"
Dec 05 01:24:08 crc kubenswrapper[4665]: E1205 01:24:08.727828 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8\": container with ID starting with 0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8 not found: ID does not exist" containerID="0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.727891 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8"} err="failed to get container status \"0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8\": rpc error: code = NotFound desc = could not find container \"0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8\": container with ID starting with 0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8 not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.727931 4665 scope.go:117] "RemoveContainer" containerID="56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e"
Dec 05 01:24:08 crc kubenswrapper[4665]: E1205 01:24:08.728640 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e\": container with ID starting with 56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e not found: ID does not exist" containerID="56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.728672 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e"} err="failed to get container status \"56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e\": rpc error: code = NotFound desc = could not find container \"56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e\": container with ID starting with 56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.728691 4665 scope.go:117] "RemoveContainer" containerID="f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670"
Dec 05 01:24:08 crc kubenswrapper[4665]: E1205 01:24:08.729098 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\": container with ID starting with f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670 not found: ID does not exist" containerID="f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.729146 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670"} err="failed to get container status \"f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\": rpc error: code = NotFound desc = could not find container \"f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\": container with ID starting with f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670 not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.729165 4665 scope.go:117] "RemoveContainer" containerID="79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336"
Dec 05 01:24:08 crc kubenswrapper[4665]: E1205 01:24:08.729579 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\": container with ID starting with 79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336 not found: ID does not exist" containerID="79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.729610 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336"} err="failed to get container status \"79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\": rpc error: code = NotFound desc = could not find container \"79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\": container with ID starting with 79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336 not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.729627 4665 scope.go:117] "RemoveContainer" containerID="41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f"
Dec 05 01:24:08 crc kubenswrapper[4665]: E1205 01:24:08.730162 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\": container with ID starting with 41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f not found: ID does not exist" containerID="41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.730191 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f"} err="failed to get container status \"41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\": rpc error: code = NotFound desc = could not find container \"41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\": container with ID starting with 41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.730212 4665 scope.go:117] "RemoveContainer" containerID="da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993"
Dec 05 01:24:08 crc kubenswrapper[4665]: E1205 01:24:08.730468 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\": container with ID starting with da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993 not found: ID does not exist" containerID="da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.730496 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993"} err="failed to get container status \"da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\": rpc error: code = NotFound desc = could not find container \"da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\": container with ID starting with da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993 not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.730513 4665 scope.go:117] "RemoveContainer" containerID="851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6"
Dec 05 01:24:08 crc kubenswrapper[4665]: E1205 01:24:08.730984 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\": container with ID starting with 851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6 not found: ID does not exist" containerID="851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.731010 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6"} err="failed to get container status \"851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\": rpc error: code = NotFound desc = could not find container \"851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\": container with ID starting with 851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6 not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.731028 4665 scope.go:117] "RemoveContainer" containerID="fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd"
Dec 05 01:24:08 crc kubenswrapper[4665]: E1205 01:24:08.731319 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\": container with ID starting with fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd not found: ID does not exist" containerID="fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.731340 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd"} err="failed to get container status \"fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\": rpc error: code = NotFound desc = could not find container \"fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\": container with ID starting with fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.731361 4665 scope.go:117] "RemoveContainer" containerID="c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132"
Dec 05 01:24:08 crc kubenswrapper[4665]: E1205 01:24:08.731536 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\": container with ID starting with c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132 not found: ID does not exist" containerID="c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.731556 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132"} err="failed to get container status \"c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\": rpc error: code = NotFound desc = could not find container \"c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\": container with ID starting with c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132 not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.731572 4665 scope.go:117] "RemoveContainer" containerID="feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f"
Dec 05 01:24:08 crc kubenswrapper[4665]: E1205 01:24:08.731759 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\": container with ID starting with feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f not found: ID does not exist" containerID="feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.731780 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f"} err="failed to get container status \"feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\": rpc error: code = NotFound desc = could not find container \"feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\": container with ID starting with feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.731798 4665 scope.go:117] "RemoveContainer" containerID="0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.732043 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8"} err="failed to get container status \"0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8\": rpc error: code = NotFound desc = could not find container \"0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8\": container with ID starting with 0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8 not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.732070 4665 scope.go:117] "RemoveContainer" containerID="56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.732333 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e"} err="failed to get container status \"56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e\": rpc error: code = NotFound desc = could not find container \"56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e\": container with ID starting with 56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.732366 4665 scope.go:117] "RemoveContainer" containerID="f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.732574 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670"} err="failed to get container status \"f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\": rpc error: code = NotFound desc = could not find container \"f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\": container with ID starting with f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670 not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.732603 4665 scope.go:117] "RemoveContainer" containerID="79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.732789 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336"} err="failed to get container status \"79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\": rpc error: code = NotFound desc = could not find container \"79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\": container with ID starting with 79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336 not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.732817 4665 scope.go:117] "RemoveContainer" containerID="41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.733004 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f"} err="failed to get container status \"41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\": rpc error: code = NotFound desc = could not find container \"41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\": container with ID starting with 41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.733036 4665 scope.go:117] "RemoveContainer" containerID="da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.733227 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993"} err="failed to get container status \"da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\": rpc error: code = NotFound desc = could not find container \"da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\": container with ID starting with da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993 not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.733248 4665 scope.go:117] "RemoveContainer" containerID="851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.733427 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6"} err="failed to get container status \"851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\": rpc error: code = NotFound desc = could not find container \"851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\": container with ID starting with 851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6 not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.733457 4665 scope.go:117] "RemoveContainer" containerID="fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.733612 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd"} err="failed to get container status \"fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\": rpc error: code = NotFound desc = could not find container \"fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\": container with ID starting with fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.733632 4665 scope.go:117] "RemoveContainer" containerID="c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.733780 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132"} err="failed to get container status \"c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\": rpc error: code = NotFound desc = could not find container \"c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\": container with ID starting with c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132 not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.733801 4665 scope.go:117] "RemoveContainer" containerID="feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.733953 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f"} err="failed to get container status \"feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\": rpc error: code = NotFound desc = could not find container \"feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\": container with ID starting with feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.733974 4665 scope.go:117] "RemoveContainer" containerID="0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.734151 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8"} err="failed to get container status \"0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8\": rpc error: code = NotFound desc = could not find container \"0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8\": container with ID starting with 0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8 not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.734176 4665 scope.go:117] "RemoveContainer" containerID="56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.734423 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e"} err="failed to get container status \"56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e\": rpc error: code = NotFound desc = could not find container \"56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e\": container with ID starting with 56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.734452 4665 scope.go:117] "RemoveContainer" containerID="f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.734632 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670"} err="failed to get container status \"f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\": rpc error: code = NotFound desc = could not find container \"f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\": container with ID starting with f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670 not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.734655 4665 scope.go:117] "RemoveContainer" containerID="79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.734809 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336"} err="failed to get container status \"79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\": rpc error: code = NotFound desc = could not find container \"79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\": container with ID starting with 79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336 not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.734830 4665 scope.go:117] "RemoveContainer" containerID="41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.734989 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f"} err="failed to get container status \"41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\": rpc error: code = NotFound desc = could not find container \"41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\": container with ID starting with 41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.735011 4665 scope.go:117] "RemoveContainer" containerID="da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.735200 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993"} err="failed to get container status \"da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\": rpc error: code = NotFound desc = could not find container \"da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\": container with ID starting with da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993 not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.735222 4665 scope.go:117] "RemoveContainer" containerID="851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.735414 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6"} err="failed to get container status \"851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\": rpc error: code = NotFound desc = could not find container \"851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\": container with ID starting with 851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6 not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.735435 4665 scope.go:117] "RemoveContainer" containerID="fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.735586 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd"} err="failed to get container status \"fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\": rpc error: code = NotFound desc = could not find container \"fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\": container with ID starting with fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.735608 4665 scope.go:117] "RemoveContainer" containerID="c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.735772 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132"} err="failed to get container status \"c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\": rpc error: code = NotFound desc = could not find container \"c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\": container with ID starting with c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132 not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.735799 4665 scope.go:117] "RemoveContainer" containerID="feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.736000 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f"} err="failed to get container status \"feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\": rpc error: code = NotFound desc = could not find container \"feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\": container with ID starting with feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.736022 4665 scope.go:117] "RemoveContainer" containerID="0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.736184 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8"} err="failed to get container status \"0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8\": rpc error: code = NotFound desc = could not find container \"0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8\": container with ID starting with 0027a9ba4c03d2c056cfad96ae63503320da7d9d93a2370783f5ffed36e913b8 not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.736206 4665 scope.go:117] "RemoveContainer" containerID="56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.736376 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e"} err="failed to get container status \"56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e\": rpc error: code = NotFound desc = could not find container \"56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e\": container with ID starting with 56f2df7f369a50d0f098eba8acd4600e3b98f6853a38df8a0dcf1952a971334e not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.736398 4665 scope.go:117] "RemoveContainer" containerID="f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.736562 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670"} err="failed to get container status \"f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\": rpc error: code = NotFound desc = could not find container \"f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670\": container with ID starting with f96b55183d8391b75e13ba0a8e5cd798d03d6baa36b6e0af503bc04d2afa1670 not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.736584 4665 scope.go:117] "RemoveContainer" containerID="79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.736744 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336"} err="failed to get container status \"79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\": rpc error: code = NotFound desc = could not find container \"79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336\": container with ID starting with 79bd0809186f090ce3a30729f417adc4a3a4a84687c02daa306489e2b6c51336 not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.736765 4665 scope.go:117] "RemoveContainer" containerID="41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.736919 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f"} err="failed to get container status \"41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\": rpc error: code = NotFound desc = could not find container \"41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f\": container with ID starting with 41f060272f708c3001228f478fb7936308870eeaed5175c94d49ea99e317048f not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.736940 4665 scope.go:117] "RemoveContainer" containerID="da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.737095 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993"} err="failed to get container status \"da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\": rpc error: code = NotFound desc = could not find container \"da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993\": container with ID starting with da0eeadd9327374cee34808a5d5f72e0189badd19b7f187ddd747e6e2bc61993 not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.737115 4665 scope.go:117] "RemoveContainer" containerID="851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.737263 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6"} err="failed to get container status \"851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\": rpc error: code = NotFound desc = could not find container \"851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6\": container with ID starting with 851e6133175198f1d0c064eda8bbd724f0f66af2e43d6480584dfddceaef67b6 not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.737285 4665 scope.go:117] "RemoveContainer" containerID="fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.737450 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd"} err="failed to get container status \"fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\": rpc error: code = NotFound desc = could not find container \"fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd\": container with ID starting with fb0d24f2d00b5302b2f6a22c0aac39b11ba3df126697d05d6653fb9ce70f96bd not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.737471 4665 scope.go:117] "RemoveContainer" containerID="c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.737616 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132"} err="failed to get container status \"c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\": rpc error: code = NotFound desc = could not find container \"c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132\": container with ID starting with c44d2b43dd8de25a1fdd2e561a1240a635631e51a4c8140103f6c346bcd3e132 not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.737637 4665 scope.go:117] "RemoveContainer" containerID="feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.737787 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f"} err="failed to get container status \"feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\": rpc error: code = NotFound desc = could not find container \"feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f\": container with ID starting with feb43bb8053e34caaed2f72905bbf8c5e3b1644b1627ced517338a32a188fa9f not found: ID does not exist"
Dec 05 01:24:08 crc kubenswrapper[4665]: I1205 01:24:08.902134 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76af84f2-4935-4e7f-8fc6-b51adcfeebc4" path="/var/lib/kubelet/pods/76af84f2-4935-4e7f-8fc6-b51adcfeebc4/volumes"
Dec 05 01:24:09 crc kubenswrapper[4665]: I1205 01:24:09.464031 4665 generic.go:334] "Generic (PLEG): container finished" podID="640e5c29-cc8b-4d04-87dc-805aa4e2ae3d" containerID="b5fcdffad647e0397b667a39f6d95acf5341b6897c1aedf6943394d03f572f0e" exitCode=0
Dec 05 01:24:09 crc kubenswrapper[4665]: I1205 01:24:09.464096 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g727j" event={"ID":"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d","Type":"ContainerDied","Data":"b5fcdffad647e0397b667a39f6d95acf5341b6897c1aedf6943394d03f572f0e"}
Dec 05 01:24:09 crc kubenswrapper[4665]: I1205 01:24:09.464128 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g727j" event={"ID":"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d","Type":"ContainerStarted","Data":"754c5761b9dd1824e4dee8b858668912383791bc407138bb911245f9631a80a0"}
Dec 05 01:24:09 crc kubenswrapper[4665]: I1205 01:24:09.467669 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-j22m9_be34b4a6-0156-4e21-bae6-12af18583b0d/kube-multus/2.log"
Dec 05 01:24:09 crc kubenswrapper[4665]: I1205 01:24:09.467849 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-j22m9" event={"ID":"be34b4a6-0156-4e21-bae6-12af18583b0d","Type":"ContainerStarted","Data":"6be0916929dd289ef653822abdf2be59a1dbe5b0f1ac3b7c4f582709d7ffe29f"}
Dec 05 01:24:10 crc kubenswrapper[4665]: I1205 01:24:10.478864 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g727j" event={"ID":"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d","Type":"ContainerStarted","Data":"315bb2c55784d0334639a4757d93d36a76bd487e2637896101bb79267cd48bd2"}
Dec 05 01:24:10 crc kubenswrapper[4665]: I1205 01:24:10.479267 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g727j" event={"ID":"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d","Type":"ContainerStarted","Data":"1e4165f7ce6f59c9d141b52c31d3ecc12ba8b50f628a680f33324e2a904aa81b"}
Dec 05 01:24:10 crc kubenswrapper[4665]: I1205 01:24:10.479280 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g727j" event={"ID":"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d","Type":"ContainerStarted","Data":"029655c38f9df0737c7b3a71727b1ce43b8585e3ec79c4191120e6b0863e2189"}
Dec 05 01:24:10 crc kubenswrapper[4665]: I1205 01:24:10.479289 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g727j" event={"ID":"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d","Type":"ContainerStarted","Data":"3c40517aebda4033698432d7ce2131ef831d8169e8cc2ae8271fdcc161ad2e3f"}
Dec 05 01:24:10 crc kubenswrapper[4665]: I1205 01:24:10.479337 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g727j" event={"ID":"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d","Type":"ContainerStarted","Data":"65a290e72925efdc0ef6cf5a22db63bc2740aa003b31c1774817721236d4597b"}
Dec 05 01:24:10 crc kubenswrapper[4665]: I1205 01:24:10.479345 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g727j" event={"ID":"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d","Type":"ContainerStarted","Data":"51fc571678a4f971976ee09c25c2ad27369436c1fb08e6781654ef87bdb2ee8a"}
Dec 05 01:24:12 crc kubenswrapper[4665]: I1205 01:24:12.492751 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g727j" event={"ID":"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d","Type":"ContainerStarted","Data":"e9313671b4b147ccfa12086387ea0e85863795bf7563f6071c50f091d70837b6"}
Dec 05 01:24:14 crc kubenswrapper[4665]: I1205 01:24:14.922175 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 01:24:14 crc kubenswrapper[4665]: I1205 01:24:14.922753 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 01:24:14 crc kubenswrapper[4665]: I1205 01:24:14.922794 4665 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc"
Dec 05 01:24:14 crc kubenswrapper[4665]: I1205 01:24:14.923347 4665 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"61048277fbfd2d8678771fdadef582a23356aff6798430a3f97d91a5d469245e"} pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 01:24:14 crc kubenswrapper[4665]: I1205 01:24:14.923409 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" containerID="cri-o://61048277fbfd2d8678771fdadef582a23356aff6798430a3f97d91a5d469245e" gracePeriod=600
Dec 05 01:24:15 crc kubenswrapper[4665]: I1205 01:24:15.521550 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-g727j" event={"ID":"640e5c29-cc8b-4d04-87dc-805aa4e2ae3d","Type":"ContainerStarted","Data":"30a999c2ddb57b9623b18239674d2dcd5ebb8ce939e0b37fbe3a6c583244c873"}
Dec 05 01:24:15 crc kubenswrapper[4665]: I1205 01:24:15.521826 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-g727j"
Dec 05 01:24:15 crc kubenswrapper[4665]: I1205 01:24:15.548507 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-g727j" podStartSLOduration=7.548491853 podStartE2EDuration="7.548491853s" podCreationTimestamp="2025-12-05 01:24:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:24:15.547417827 +0000 UTC m=+830.886810136" watchObservedRunningTime="2025-12-05 01:24:15.548491853 +0000 UTC m=+830.887884152"
Dec 05 01:24:15 crc kubenswrapper[4665]: I1205 01:24:15.553626 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-g727j"
Dec 05 01:24:16 crc kubenswrapper[4665]: I1205 01:24:16.528100 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-g727j"
Dec 05 01:24:16 crc kubenswrapper[4665]: I1205 01:24:16.528238 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-g727j"
Dec 05 01:24:16 crc kubenswrapper[4665]: I1205 01:24:16.551662 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-g727j"
Dec 05 01:24:18 crc kubenswrapper[4665]: I1205 01:24:18.538934 4665 generic.go:334] "Generic (PLEG): container finished" podID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerID="61048277fbfd2d8678771fdadef582a23356aff6798430a3f97d91a5d469245e" exitCode=0
Dec 05 01:24:18 crc kubenswrapper[4665]: I1205 01:24:18.538991 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerDied","Data":"61048277fbfd2d8678771fdadef582a23356aff6798430a3f97d91a5d469245e"}
Dec 05 01:24:18 crc kubenswrapper[4665]: I1205 01:24:18.539213 4665 scope.go:117] "RemoveContainer" containerID="2cde39d7d4a0201ce1c07c9117b9e415325e344e62e60943726ee18c016a15d3"
Dec 05 01:24:19 crc kubenswrapper[4665]: I1205 01:24:19.547016 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerStarted","Data":"1cd3d1505d7ab823a5b8b16aa7787ab1595f7aa23355a3b5bb9a7a6dd4cb7347"}
Dec 05 01:24:38 crc kubenswrapper[4665]: I1205 01:24:38.537904 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-g727j"
Dec 05 01:24:54 crc kubenswrapper[4665]: I1205 01:24:54.172090 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn"]
Dec 05 01:24:54 crc kubenswrapper[4665]: I1205 01:24:54.174529 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn"
Dec 05 01:24:54 crc kubenswrapper[4665]: I1205 01:24:54.177850 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Dec 05 01:24:54 crc kubenswrapper[4665]: I1205 01:24:54.184001 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn"]
Dec 05 01:24:54 crc kubenswrapper[4665]: I1205 01:24:54.342474 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3d279a3e-be32-4a4d-bf07-54cb8f3b2efa-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn\" (UID: \"3d279a3e-be32-4a4d-bf07-54cb8f3b2efa\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn"
Dec 05 01:24:54 crc kubenswrapper[4665]: I1205 01:24:54.342514 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3d279a3e-be32-4a4d-bf07-54cb8f3b2efa-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn\" (UID: \"3d279a3e-be32-4a4d-bf07-54cb8f3b2efa\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn"
Dec 05 01:24:54 crc kubenswrapper[4665]: I1205 01:24:54.342538 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prtb7\" (UniqueName: \"kubernetes.io/projected/3d279a3e-be32-4a4d-bf07-54cb8f3b2efa-kube-api-access-prtb7\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn\" (UID: \"3d279a3e-be32-4a4d-bf07-54cb8f3b2efa\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn"
Dec 05 01:24:54 crc kubenswrapper[4665]: I1205 01:24:54.443267 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3d279a3e-be32-4a4d-bf07-54cb8f3b2efa-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn\" (UID: \"3d279a3e-be32-4a4d-bf07-54cb8f3b2efa\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn"
Dec 05 01:24:54 crc kubenswrapper[4665]: I1205 01:24:54.443333 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3d279a3e-be32-4a4d-bf07-54cb8f3b2efa-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn\" (UID: \"3d279a3e-be32-4a4d-bf07-54cb8f3b2efa\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn"
Dec 05 01:24:54 crc kubenswrapper[4665]: I1205 01:24:54.443366 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prtb7\" (UniqueName: \"kubernetes.io/projected/3d279a3e-be32-4a4d-bf07-54cb8f3b2efa-kube-api-access-prtb7\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn\" (UID: \"3d279a3e-be32-4a4d-bf07-54cb8f3b2efa\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn"
Dec 05 01:24:54 crc kubenswrapper[4665]: I1205 01:24:54.444201 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3d279a3e-be32-4a4d-bf07-54cb8f3b2efa-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn\" (UID: \"3d279a3e-be32-4a4d-bf07-54cb8f3b2efa\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn"
Dec 05 01:24:54 crc kubenswrapper[4665]: I1205 01:24:54.444245 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3d279a3e-be32-4a4d-bf07-54cb8f3b2efa-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn\" (UID: \"3d279a3e-be32-4a4d-bf07-54cb8f3b2efa\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn"
Dec 05 01:24:54 crc kubenswrapper[4665]: I1205 01:24:54.463466 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prtb7\" (UniqueName: \"kubernetes.io/projected/3d279a3e-be32-4a4d-bf07-54cb8f3b2efa-kube-api-access-prtb7\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn\" (UID: \"3d279a3e-be32-4a4d-bf07-54cb8f3b2efa\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn"
Dec 05 01:24:54 crc kubenswrapper[4665]: I1205 01:24:54.497456 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn"
Dec 05 01:24:54 crc kubenswrapper[4665]: I1205 01:24:54.920821 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn"]
Dec 05 01:24:55 crc kubenswrapper[4665]: I1205 01:24:55.727019 4665 generic.go:334] "Generic (PLEG): container finished" podID="3d279a3e-be32-4a4d-bf07-54cb8f3b2efa" containerID="329315d685fe8905d4e470616386c2017ea9268c4cff511fd954f8cd2c96c4a0" exitCode=0
Dec 05 01:24:55 crc kubenswrapper[4665]: I1205 01:24:55.727059 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn" event={"ID":"3d279a3e-be32-4a4d-bf07-54cb8f3b2efa","Type":"ContainerDied","Data":"329315d685fe8905d4e470616386c2017ea9268c4cff511fd954f8cd2c96c4a0"}
Dec 05 01:24:55 crc kubenswrapper[4665]: I1205 01:24:55.727316 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn" event={"ID":"3d279a3e-be32-4a4d-bf07-54cb8f3b2efa","Type":"ContainerStarted","Data":"b73de8d4c9a402617540f805ea15b8066c65f7514d2b678c473cd7e5101241dc"}
Dec 05 01:24:56 crc kubenswrapper[4665]: I1205 01:24:56.534946 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-q84b5"]
Dec 05 01:24:56 crc kubenswrapper[4665]: I1205 01:24:56.535947 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-q84b5"
Dec 05 01:24:56 crc kubenswrapper[4665]: I1205 01:24:56.549964 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-q84b5"]
Dec 05 01:24:56 crc kubenswrapper[4665]: I1205 01:24:56.566893 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0d9247c-6408-4079-853c-25e65bc59cea-catalog-content\") pod \"redhat-operators-q84b5\" (UID: \"c0d9247c-6408-4079-853c-25e65bc59cea\") " pod="openshift-marketplace/redhat-operators-q84b5"
Dec 05 01:24:56 crc kubenswrapper[4665]: I1205 01:24:56.567031 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r8ggg\" (UniqueName: \"kubernetes.io/projected/c0d9247c-6408-4079-853c-25e65bc59cea-kube-api-access-r8ggg\") pod \"redhat-operators-q84b5\" (UID: \"c0d9247c-6408-4079-853c-25e65bc59cea\") " pod="openshift-marketplace/redhat-operators-q84b5"
Dec 05 01:24:56 crc kubenswrapper[4665]: I1205 01:24:56.567058 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0d9247c-6408-4079-853c-25e65bc59cea-utilities\") pod \"redhat-operators-q84b5\" (UID: \"c0d9247c-6408-4079-853c-25e65bc59cea\") " pod="openshift-marketplace/redhat-operators-q84b5"
Dec 05 01:24:56 crc kubenswrapper[4665]: I1205 01:24:56.668195 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0d9247c-6408-4079-853c-25e65bc59cea-catalog-content\") pod \"redhat-operators-q84b5\" (UID: \"c0d9247c-6408-4079-853c-25e65bc59cea\") " pod="openshift-marketplace/redhat-operators-q84b5"
Dec 05 01:24:56 crc kubenswrapper[4665]: I1205 01:24:56.668339 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r8ggg\" (UniqueName: \"kubernetes.io/projected/c0d9247c-6408-4079-853c-25e65bc59cea-kube-api-access-r8ggg\") pod \"redhat-operators-q84b5\" (UID: \"c0d9247c-6408-4079-853c-25e65bc59cea\") " pod="openshift-marketplace/redhat-operators-q84b5"
Dec 05 01:24:56 crc kubenswrapper[4665]: I1205 01:24:56.668373 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0d9247c-6408-4079-853c-25e65bc59cea-utilities\") pod \"redhat-operators-q84b5\" (UID: \"c0d9247c-6408-4079-853c-25e65bc59cea\") " pod="openshift-marketplace/redhat-operators-q84b5"
Dec 05 01:24:56 crc kubenswrapper[4665]: I1205 01:24:56.668750 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0d9247c-6408-4079-853c-25e65bc59cea-catalog-content\") pod \"redhat-operators-q84b5\" (UID: \"c0d9247c-6408-4079-853c-25e65bc59cea\") " pod="openshift-marketplace/redhat-operators-q84b5"
Dec 05 01:24:56 crc kubenswrapper[4665]: I1205 01:24:56.668864 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0d9247c-6408-4079-853c-25e65bc59cea-utilities\") pod \"redhat-operators-q84b5\" (UID: \"c0d9247c-6408-4079-853c-25e65bc59cea\") " pod="openshift-marketplace/redhat-operators-q84b5"
Dec 05 01:24:56 crc kubenswrapper[4665]: I1205 01:24:56.685877 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r8ggg\" (UniqueName: \"kubernetes.io/projected/c0d9247c-6408-4079-853c-25e65bc59cea-kube-api-access-r8ggg\") pod \"redhat-operators-q84b5\" (UID: \"c0d9247c-6408-4079-853c-25e65bc59cea\") " pod="openshift-marketplace/redhat-operators-q84b5"
Dec 05 01:24:56 crc kubenswrapper[4665]: I1205 01:24:56.852564 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-q84b5"
Dec 05 01:24:57 crc kubenswrapper[4665]: I1205 01:24:57.135779 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-q84b5"]
Dec 05 01:24:57 crc kubenswrapper[4665]: I1205 01:24:57.740680 4665 generic.go:334] "Generic (PLEG): container finished" podID="3d279a3e-be32-4a4d-bf07-54cb8f3b2efa" containerID="577401260ba74dc7232df0bf4f0054bef0a02d28c03150064aae31e9129f7f93" exitCode=0
Dec 05 01:24:57 crc kubenswrapper[4665]: I1205 01:24:57.740757 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn" event={"ID":"3d279a3e-be32-4a4d-bf07-54cb8f3b2efa","Type":"ContainerDied","Data":"577401260ba74dc7232df0bf4f0054bef0a02d28c03150064aae31e9129f7f93"}
Dec 05 01:24:57 crc kubenswrapper[4665]: I1205 01:24:57.742106 4665 generic.go:334] "Generic (PLEG): container finished" podID="c0d9247c-6408-4079-853c-25e65bc59cea" containerID="877eeb0abb806ca64b014e8b978516ce81f4ab318869803ab78a0b213d8b4fc4" exitCode=0
Dec 05 01:24:57 crc kubenswrapper[4665]: I1205 01:24:57.742137 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q84b5" event={"ID":"c0d9247c-6408-4079-853c-25e65bc59cea","Type":"ContainerDied","Data":"877eeb0abb806ca64b014e8b978516ce81f4ab318869803ab78a0b213d8b4fc4"}
Dec 05 01:24:57 crc kubenswrapper[4665]: I1205 01:24:57.742157 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q84b5" event={"ID":"c0d9247c-6408-4079-853c-25e65bc59cea","Type":"ContainerStarted","Data":"31947e2d7ea7a7ee62f0fbef23a3de78469ad35f6e71a4e13c084d31266e21fe"}
Dec 05 01:24:58 crc kubenswrapper[4665]: I1205 01:24:58.750390 4665 generic.go:334] "Generic (PLEG): container finished" podID="3d279a3e-be32-4a4d-bf07-54cb8f3b2efa" containerID="c2f8bd179cb10db062475435eb850a38a7e2e0aef17f4507d684149ad88f7aa2" exitCode=0
Dec 05 01:24:58 crc kubenswrapper[4665]: I1205 01:24:58.750461 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn" event={"ID":"3d279a3e-be32-4a4d-bf07-54cb8f3b2efa","Type":"ContainerDied","Data":"c2f8bd179cb10db062475435eb850a38a7e2e0aef17f4507d684149ad88f7aa2"}
Dec 05 01:24:58 crc kubenswrapper[4665]: I1205 01:24:58.753362 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q84b5" event={"ID":"c0d9247c-6408-4079-853c-25e65bc59cea","Type":"ContainerStarted","Data":"c3db01a26cadd7345ae4aaa9401dd2a6bd64c03e3b1c80414aed7f9c3649d481"}
Dec 05 01:24:59 crc kubenswrapper[4665]: I1205 01:24:59.761040 4665 generic.go:334] "Generic (PLEG): container finished" podID="c0d9247c-6408-4079-853c-25e65bc59cea" containerID="c3db01a26cadd7345ae4aaa9401dd2a6bd64c03e3b1c80414aed7f9c3649d481" exitCode=0
Dec 05 01:24:59 crc kubenswrapper[4665]: I1205 01:24:59.761127 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q84b5" event={"ID":"c0d9247c-6408-4079-853c-25e65bc59cea","Type":"ContainerDied","Data":"c3db01a26cadd7345ae4aaa9401dd2a6bd64c03e3b1c80414aed7f9c3649d481"}
Dec 05 01:24:59 crc kubenswrapper[4665]: I1205 01:24:59.982367 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn"
Dec 05 01:25:00 crc kubenswrapper[4665]: I1205 01:25:00.029748 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3d279a3e-be32-4a4d-bf07-54cb8f3b2efa-bundle\") pod \"3d279a3e-be32-4a4d-bf07-54cb8f3b2efa\" (UID: \"3d279a3e-be32-4a4d-bf07-54cb8f3b2efa\") "
Dec 05 01:25:00 crc kubenswrapper[4665]: I1205 01:25:00.029801 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-prtb7\" (UniqueName: \"kubernetes.io/projected/3d279a3e-be32-4a4d-bf07-54cb8f3b2efa-kube-api-access-prtb7\") pod \"3d279a3e-be32-4a4d-bf07-54cb8f3b2efa\" (UID: \"3d279a3e-be32-4a4d-bf07-54cb8f3b2efa\") "
Dec 05 01:25:00 crc kubenswrapper[4665]: I1205 01:25:00.029830 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3d279a3e-be32-4a4d-bf07-54cb8f3b2efa-util\") pod \"3d279a3e-be32-4a4d-bf07-54cb8f3b2efa\" (UID: \"3d279a3e-be32-4a4d-bf07-54cb8f3b2efa\") "
Dec 05 01:25:00 crc kubenswrapper[4665]: I1205 01:25:00.030454 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d279a3e-be32-4a4d-bf07-54cb8f3b2efa-bundle" (OuterVolumeSpecName: "bundle") pod "3d279a3e-be32-4a4d-bf07-54cb8f3b2efa" (UID: "3d279a3e-be32-4a4d-bf07-54cb8f3b2efa"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 01:25:00 crc kubenswrapper[4665]: I1205 01:25:00.037315 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d279a3e-be32-4a4d-bf07-54cb8f3b2efa-kube-api-access-prtb7" (OuterVolumeSpecName: "kube-api-access-prtb7") pod "3d279a3e-be32-4a4d-bf07-54cb8f3b2efa" (UID: "3d279a3e-be32-4a4d-bf07-54cb8f3b2efa"). InnerVolumeSpecName "kube-api-access-prtb7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:25:00 crc kubenswrapper[4665]: I1205 01:25:00.046589 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3d279a3e-be32-4a4d-bf07-54cb8f3b2efa-util" (OuterVolumeSpecName: "util") pod "3d279a3e-be32-4a4d-bf07-54cb8f3b2efa" (UID: "3d279a3e-be32-4a4d-bf07-54cb8f3b2efa"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 01:25:00 crc kubenswrapper[4665]: I1205 01:25:00.131123 4665 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3d279a3e-be32-4a4d-bf07-54cb8f3b2efa-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 01:25:00 crc kubenswrapper[4665]: I1205 01:25:00.131173 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-prtb7\" (UniqueName: \"kubernetes.io/projected/3d279a3e-be32-4a4d-bf07-54cb8f3b2efa-kube-api-access-prtb7\") on node \"crc\" DevicePath \"\""
Dec 05 01:25:00 crc kubenswrapper[4665]: I1205 01:25:00.131188 4665 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3d279a3e-be32-4a4d-bf07-54cb8f3b2efa-util\") on node \"crc\" DevicePath \"\""
Dec 05 01:25:00 crc kubenswrapper[4665]: I1205 01:25:00.771466 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn"
Dec 05 01:25:00 crc kubenswrapper[4665]: I1205 01:25:00.771444 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn" event={"ID":"3d279a3e-be32-4a4d-bf07-54cb8f3b2efa","Type":"ContainerDied","Data":"b73de8d4c9a402617540f805ea15b8066c65f7514d2b678c473cd7e5101241dc"}
Dec 05 01:25:00 crc kubenswrapper[4665]: I1205 01:25:00.771891 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b73de8d4c9a402617540f805ea15b8066c65f7514d2b678c473cd7e5101241dc"
Dec 05 01:25:00 crc kubenswrapper[4665]: I1205 01:25:00.773451 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q84b5" event={"ID":"c0d9247c-6408-4079-853c-25e65bc59cea","Type":"ContainerStarted","Data":"ebee4367bfad252e5aac7e667f4a82e538a0ee3806799b65af5361fbe84e4669"}
Dec 05 01:25:00 crc kubenswrapper[4665]: I1205 01:25:00.798090 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-q84b5" podStartSLOduration=2.386729581 podStartE2EDuration="4.798072905s" podCreationTimestamp="2025-12-05 01:24:56 +0000 UTC" firstStartedPulling="2025-12-05 01:24:57.743274704 +0000 UTC m=+873.082667003" lastFinishedPulling="2025-12-05 01:25:00.154618028 +0000 UTC m=+875.494010327" observedRunningTime="2025-12-05 01:25:00.792376016 +0000 UTC m=+876.131768375" watchObservedRunningTime="2025-12-05 01:25:00.798072905 +0000 UTC m=+876.137465204"
Dec 05 01:25:05 crc kubenswrapper[4665]: I1205 01:25:05.744277 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-7mmkk"]
Dec 05 01:25:05 crc kubenswrapper[4665]: E1205 01:25:05.744863 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d279a3e-be32-4a4d-bf07-54cb8f3b2efa" containerName="extract"
Dec 05 01:25:05 crc kubenswrapper[4665]: I1205 01:25:05.744877 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d279a3e-be32-4a4d-bf07-54cb8f3b2efa" containerName="extract"
Dec 05 01:25:05 crc kubenswrapper[4665]: E1205 01:25:05.744890 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d279a3e-be32-4a4d-bf07-54cb8f3b2efa" containerName="util"
Dec 05 01:25:05 crc kubenswrapper[4665]: I1205 01:25:05.744896 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d279a3e-be32-4a4d-bf07-54cb8f3b2efa" containerName="util"
Dec 05 01:25:05 crc kubenswrapper[4665]: E1205 01:25:05.744910 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d279a3e-be32-4a4d-bf07-54cb8f3b2efa" containerName="pull"
Dec 05 01:25:05 crc kubenswrapper[4665]: I1205 01:25:05.744916 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d279a3e-be32-4a4d-bf07-54cb8f3b2efa" containerName="pull"
Dec 05 01:25:05 crc kubenswrapper[4665]: I1205 01:25:05.745015 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d279a3e-be32-4a4d-bf07-54cb8f3b2efa" containerName="extract"
Dec 05 01:25:05 crc kubenswrapper[4665]: I1205 01:25:05.746520 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-7mmkk"
Dec 05 01:25:05 crc kubenswrapper[4665]: I1205 01:25:05.752354 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt"
Dec 05 01:25:05 crc kubenswrapper[4665]: I1205 01:25:05.752822 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-fj42q"
Dec 05 01:25:05 crc kubenswrapper[4665]: I1205 01:25:05.753075 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt"
Dec 05 01:25:05 crc kubenswrapper[4665]: I1205 01:25:05.765001 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-7mmkk"]
Dec 05 01:25:05 crc kubenswrapper[4665]: I1205 01:25:05.894986 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5qj5\" (UniqueName: \"kubernetes.io/projected/1eb252f2-27b1-4cd9-be84-4183313f0710-kube-api-access-t5qj5\") pod \"nmstate-operator-5b5b58f5c8-7mmkk\" (UID: \"1eb252f2-27b1-4cd9-be84-4183313f0710\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-7mmkk"
Dec 05 01:25:05 crc kubenswrapper[4665]: I1205 01:25:05.996583 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5qj5\" (UniqueName: \"kubernetes.io/projected/1eb252f2-27b1-4cd9-be84-4183313f0710-kube-api-access-t5qj5\") pod \"nmstate-operator-5b5b58f5c8-7mmkk\" (UID: \"1eb252f2-27b1-4cd9-be84-4183313f0710\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-7mmkk"
Dec 05 01:25:06 crc kubenswrapper[4665]: I1205 01:25:06.014982 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5qj5\" (UniqueName: \"kubernetes.io/projected/1eb252f2-27b1-4cd9-be84-4183313f0710-kube-api-access-t5qj5\") pod \"nmstate-operator-5b5b58f5c8-7mmkk\" (UID: \"1eb252f2-27b1-4cd9-be84-4183313f0710\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-7mmkk"
Dec 05 01:25:06 crc kubenswrapper[4665]: I1205 01:25:06.062794 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-7mmkk"
Dec 05 01:25:06 crc kubenswrapper[4665]: I1205 01:25:06.144203 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-v7kmp"]
Dec 05 01:25:06 crc kubenswrapper[4665]: I1205 01:25:06.145715 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-v7kmp"
Dec 05 01:25:06 crc kubenswrapper[4665]: I1205 01:25:06.154242 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-v7kmp"]
Dec 05 01:25:06 crc kubenswrapper[4665]: I1205 01:25:06.290671 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-7mmkk"]
Dec 05 01:25:06 crc kubenswrapper[4665]: I1205 01:25:06.299561 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4308dc80-0ff6-4591-b5f2-09962e3ea32d-utilities\") pod \"redhat-marketplace-v7kmp\" (UID: \"4308dc80-0ff6-4591-b5f2-09962e3ea32d\") " pod="openshift-marketplace/redhat-marketplace-v7kmp"
Dec 05 01:25:06 crc kubenswrapper[4665]: I1205 01:25:06.299770 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4308dc80-0ff6-4591-b5f2-09962e3ea32d-catalog-content\") pod \"redhat-marketplace-v7kmp\" (UID: \"4308dc80-0ff6-4591-b5f2-09962e3ea32d\") " pod="openshift-marketplace/redhat-marketplace-v7kmp"
Dec 05 01:25:06 crc kubenswrapper[4665]: I1205 01:25:06.299805 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vk22g\" (UniqueName: \"kubernetes.io/projected/4308dc80-0ff6-4591-b5f2-09962e3ea32d-kube-api-access-vk22g\") pod \"redhat-marketplace-v7kmp\" (UID: \"4308dc80-0ff6-4591-b5f2-09962e3ea32d\") " pod="openshift-marketplace/redhat-marketplace-v7kmp"
Dec 05 01:25:06 crc kubenswrapper[4665]: I1205 01:25:06.401393 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4308dc80-0ff6-4591-b5f2-09962e3ea32d-utilities\") pod \"redhat-marketplace-v7kmp\" (UID: \"4308dc80-0ff6-4591-b5f2-09962e3ea32d\") " pod="openshift-marketplace/redhat-marketplace-v7kmp"
Dec 05 01:25:06 crc kubenswrapper[4665]: I1205 01:25:06.401459 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4308dc80-0ff6-4591-b5f2-09962e3ea32d-catalog-content\") pod \"redhat-marketplace-v7kmp\" (UID: \"4308dc80-0ff6-4591-b5f2-09962e3ea32d\") " pod="openshift-marketplace/redhat-marketplace-v7kmp"
Dec 05 01:25:06 crc kubenswrapper[4665]: I1205 01:25:06.401487 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vk22g\" (UniqueName: \"kubernetes.io/projected/4308dc80-0ff6-4591-b5f2-09962e3ea32d-kube-api-access-vk22g\") pod \"redhat-marketplace-v7kmp\" (UID: \"4308dc80-0ff6-4591-b5f2-09962e3ea32d\") " pod="openshift-marketplace/redhat-marketplace-v7kmp"
Dec 05 01:25:06 crc kubenswrapper[4665]: I1205 01:25:06.402110 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4308dc80-0ff6-4591-b5f2-09962e3ea32d-utilities\") pod \"redhat-marketplace-v7kmp\" (UID: \"4308dc80-0ff6-4591-b5f2-09962e3ea32d\") " pod="openshift-marketplace/redhat-marketplace-v7kmp"
Dec 05 01:25:06 crc kubenswrapper[4665]: I1205 01:25:06.402170 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4308dc80-0ff6-4591-b5f2-09962e3ea32d-catalog-content\") pod \"redhat-marketplace-v7kmp\" (UID: 
\"4308dc80-0ff6-4591-b5f2-09962e3ea32d\") " pod="openshift-marketplace/redhat-marketplace-v7kmp" Dec 05 01:25:06 crc kubenswrapper[4665]: I1205 01:25:06.419344 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vk22g\" (UniqueName: \"kubernetes.io/projected/4308dc80-0ff6-4591-b5f2-09962e3ea32d-kube-api-access-vk22g\") pod \"redhat-marketplace-v7kmp\" (UID: \"4308dc80-0ff6-4591-b5f2-09962e3ea32d\") " pod="openshift-marketplace/redhat-marketplace-v7kmp" Dec 05 01:25:06 crc kubenswrapper[4665]: I1205 01:25:06.463186 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-v7kmp" Dec 05 01:25:06 crc kubenswrapper[4665]: I1205 01:25:06.749046 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-v7kmp"] Dec 05 01:25:06 crc kubenswrapper[4665]: W1205 01:25:06.755878 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4308dc80_0ff6_4591_b5f2_09962e3ea32d.slice/crio-e3d2f59de765fff3b95c5aaa39a85a26551858dd7493e874c9c0c39d0eb5c76d WatchSource:0}: Error finding container e3d2f59de765fff3b95c5aaa39a85a26551858dd7493e874c9c0c39d0eb5c76d: Status 404 returned error can't find the container with id e3d2f59de765fff3b95c5aaa39a85a26551858dd7493e874c9c0c39d0eb5c76d Dec 05 01:25:06 crc kubenswrapper[4665]: I1205 01:25:06.805229 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v7kmp" event={"ID":"4308dc80-0ff6-4591-b5f2-09962e3ea32d","Type":"ContainerStarted","Data":"e3d2f59de765fff3b95c5aaa39a85a26551858dd7493e874c9c0c39d0eb5c76d"} Dec 05 01:25:06 crc kubenswrapper[4665]: I1205 01:25:06.806708 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-7mmkk" event={"ID":"1eb252f2-27b1-4cd9-be84-4183313f0710","Type":"ContainerStarted","Data":"1b5a04b9f7922b4cd34008b755d3418c8a0f068219db2677336b005205199210"} Dec 05 01:25:06 crc kubenswrapper[4665]: I1205 01:25:06.853388 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-q84b5" Dec 05 01:25:06 crc kubenswrapper[4665]: I1205 01:25:06.853665 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-q84b5" Dec 05 01:25:06 crc kubenswrapper[4665]: I1205 01:25:06.890815 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-q84b5" Dec 05 01:25:07 crc kubenswrapper[4665]: I1205 01:25:07.814521 4665 generic.go:334] "Generic (PLEG): container finished" podID="4308dc80-0ff6-4591-b5f2-09962e3ea32d" containerID="45d9a93330c9852c1d5c060e292556746387093b42919b54cdec0d859b54484e" exitCode=0 Dec 05 01:25:07 crc kubenswrapper[4665]: I1205 01:25:07.815008 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v7kmp" event={"ID":"4308dc80-0ff6-4591-b5f2-09962e3ea32d","Type":"ContainerDied","Data":"45d9a93330c9852c1d5c060e292556746387093b42919b54cdec0d859b54484e"} Dec 05 01:25:07 crc kubenswrapper[4665]: I1205 01:25:07.902139 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-q84b5" Dec 05 01:25:08 crc kubenswrapper[4665]: I1205 01:25:08.823518 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-7mmkk" 
event={"ID":"1eb252f2-27b1-4cd9-be84-4183313f0710","Type":"ContainerStarted","Data":"f3b032f99b4654c0aeb63ad7053360ddd31a3a814c1dea56f81edba3cd86fdc2"} Dec 05 01:25:08 crc kubenswrapper[4665]: I1205 01:25:08.855268 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-7mmkk" podStartSLOduration=1.6561373069999998 podStartE2EDuration="3.855220211s" podCreationTimestamp="2025-12-05 01:25:05 +0000 UTC" firstStartedPulling="2025-12-05 01:25:06.298634841 +0000 UTC m=+881.638027140" lastFinishedPulling="2025-12-05 01:25:08.497717745 +0000 UTC m=+883.837110044" observedRunningTime="2025-12-05 01:25:08.838998803 +0000 UTC m=+884.178391142" watchObservedRunningTime="2025-12-05 01:25:08.855220211 +0000 UTC m=+884.194612510" Dec 05 01:25:09 crc kubenswrapper[4665]: I1205 01:25:09.832244 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v7kmp" event={"ID":"4308dc80-0ff6-4591-b5f2-09962e3ea32d","Type":"ContainerStarted","Data":"69614e3353542691aa7d063b388b8d53da382fd2494e311cc1c4c0f02621e372"} Dec 05 01:25:10 crc kubenswrapper[4665]: I1205 01:25:10.523747 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-q84b5"] Dec 05 01:25:10 crc kubenswrapper[4665]: I1205 01:25:10.837270 4665 generic.go:334] "Generic (PLEG): container finished" podID="4308dc80-0ff6-4591-b5f2-09962e3ea32d" containerID="69614e3353542691aa7d063b388b8d53da382fd2494e311cc1c4c0f02621e372" exitCode=0 Dec 05 01:25:10 crc kubenswrapper[4665]: I1205 01:25:10.837344 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v7kmp" event={"ID":"4308dc80-0ff6-4591-b5f2-09962e3ea32d","Type":"ContainerDied","Data":"69614e3353542691aa7d063b388b8d53da382fd2494e311cc1c4c0f02621e372"} Dec 05 01:25:10 crc kubenswrapper[4665]: I1205 01:25:10.838788 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-q84b5" podUID="c0d9247c-6408-4079-853c-25e65bc59cea" containerName="registry-server" containerID="cri-o://ebee4367bfad252e5aac7e667f4a82e538a0ee3806799b65af5361fbe84e4669" gracePeriod=2 Dec 05 01:25:11 crc kubenswrapper[4665]: I1205 01:25:11.674462 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-q84b5" Dec 05 01:25:11 crc kubenswrapper[4665]: I1205 01:25:11.774139 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0d9247c-6408-4079-853c-25e65bc59cea-catalog-content\") pod \"c0d9247c-6408-4079-853c-25e65bc59cea\" (UID: \"c0d9247c-6408-4079-853c-25e65bc59cea\") " Dec 05 01:25:11 crc kubenswrapper[4665]: I1205 01:25:11.774234 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0d9247c-6408-4079-853c-25e65bc59cea-utilities\") pod \"c0d9247c-6408-4079-853c-25e65bc59cea\" (UID: \"c0d9247c-6408-4079-853c-25e65bc59cea\") " Dec 05 01:25:11 crc kubenswrapper[4665]: I1205 01:25:11.774275 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r8ggg\" (UniqueName: \"kubernetes.io/projected/c0d9247c-6408-4079-853c-25e65bc59cea-kube-api-access-r8ggg\") pod \"c0d9247c-6408-4079-853c-25e65bc59cea\" (UID: \"c0d9247c-6408-4079-853c-25e65bc59cea\") " Dec 05 01:25:11 crc kubenswrapper[4665]: I1205 01:25:11.775060 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0d9247c-6408-4079-853c-25e65bc59cea-utilities" (OuterVolumeSpecName: "utilities") pod "c0d9247c-6408-4079-853c-25e65bc59cea" (UID: "c0d9247c-6408-4079-853c-25e65bc59cea"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:25:11 crc kubenswrapper[4665]: I1205 01:25:11.784469 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0d9247c-6408-4079-853c-25e65bc59cea-kube-api-access-r8ggg" (OuterVolumeSpecName: "kube-api-access-r8ggg") pod "c0d9247c-6408-4079-853c-25e65bc59cea" (UID: "c0d9247c-6408-4079-853c-25e65bc59cea"). InnerVolumeSpecName "kube-api-access-r8ggg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:25:11 crc kubenswrapper[4665]: I1205 01:25:11.844588 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v7kmp" event={"ID":"4308dc80-0ff6-4591-b5f2-09962e3ea32d","Type":"ContainerStarted","Data":"c6539d2218b1434e2bb878df1985accf286d1ca96c68c8cfda43d646d71ceedc"} Dec 05 01:25:11 crc kubenswrapper[4665]: I1205 01:25:11.846761 4665 generic.go:334] "Generic (PLEG): container finished" podID="c0d9247c-6408-4079-853c-25e65bc59cea" containerID="ebee4367bfad252e5aac7e667f4a82e538a0ee3806799b65af5361fbe84e4669" exitCode=0 Dec 05 01:25:11 crc kubenswrapper[4665]: I1205 01:25:11.846789 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q84b5" event={"ID":"c0d9247c-6408-4079-853c-25e65bc59cea","Type":"ContainerDied","Data":"ebee4367bfad252e5aac7e667f4a82e538a0ee3806799b65af5361fbe84e4669"} Dec 05 01:25:11 crc kubenswrapper[4665]: I1205 01:25:11.846930 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q84b5" event={"ID":"c0d9247c-6408-4079-853c-25e65bc59cea","Type":"ContainerDied","Data":"31947e2d7ea7a7ee62f0fbef23a3de78469ad35f6e71a4e13c084d31266e21fe"} Dec 05 01:25:11 crc kubenswrapper[4665]: I1205 01:25:11.846993 4665 scope.go:117] "RemoveContainer" containerID="ebee4367bfad252e5aac7e667f4a82e538a0ee3806799b65af5361fbe84e4669" Dec 05 01:25:11 crc kubenswrapper[4665]: I1205 01:25:11.846812 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-q84b5" Dec 05 01:25:11 crc kubenswrapper[4665]: I1205 01:25:11.866457 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-v7kmp" podStartSLOduration=3.063763035 podStartE2EDuration="5.866436073s" podCreationTimestamp="2025-12-05 01:25:06 +0000 UTC" firstStartedPulling="2025-12-05 01:25:08.435562994 +0000 UTC m=+883.774955293" lastFinishedPulling="2025-12-05 01:25:11.238236032 +0000 UTC m=+886.577628331" observedRunningTime="2025-12-05 01:25:11.861310157 +0000 UTC m=+887.200702466" watchObservedRunningTime="2025-12-05 01:25:11.866436073 +0000 UTC m=+887.205828392" Dec 05 01:25:11 crc kubenswrapper[4665]: I1205 01:25:11.872846 4665 scope.go:117] "RemoveContainer" containerID="c3db01a26cadd7345ae4aaa9401dd2a6bd64c03e3b1c80414aed7f9c3649d481" Dec 05 01:25:11 crc kubenswrapper[4665]: I1205 01:25:11.876052 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0d9247c-6408-4079-853c-25e65bc59cea-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 01:25:11 crc kubenswrapper[4665]: I1205 01:25:11.876207 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r8ggg\" (UniqueName: \"kubernetes.io/projected/c0d9247c-6408-4079-853c-25e65bc59cea-kube-api-access-r8ggg\") on node \"crc\" DevicePath \"\"" Dec 05 01:25:11 crc kubenswrapper[4665]: I1205 01:25:11.880727 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0d9247c-6408-4079-853c-25e65bc59cea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c0d9247c-6408-4079-853c-25e65bc59cea" (UID: "c0d9247c-6408-4079-853c-25e65bc59cea"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:25:11 crc kubenswrapper[4665]: I1205 01:25:11.888675 4665 scope.go:117] "RemoveContainer" containerID="877eeb0abb806ca64b014e8b978516ce81f4ab318869803ab78a0b213d8b4fc4" Dec 05 01:25:11 crc kubenswrapper[4665]: I1205 01:25:11.905898 4665 scope.go:117] "RemoveContainer" containerID="ebee4367bfad252e5aac7e667f4a82e538a0ee3806799b65af5361fbe84e4669" Dec 05 01:25:11 crc kubenswrapper[4665]: E1205 01:25:11.906414 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ebee4367bfad252e5aac7e667f4a82e538a0ee3806799b65af5361fbe84e4669\": container with ID starting with ebee4367bfad252e5aac7e667f4a82e538a0ee3806799b65af5361fbe84e4669 not found: ID does not exist" containerID="ebee4367bfad252e5aac7e667f4a82e538a0ee3806799b65af5361fbe84e4669" Dec 05 01:25:11 crc kubenswrapper[4665]: I1205 01:25:11.906454 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ebee4367bfad252e5aac7e667f4a82e538a0ee3806799b65af5361fbe84e4669"} err="failed to get container status \"ebee4367bfad252e5aac7e667f4a82e538a0ee3806799b65af5361fbe84e4669\": rpc error: code = NotFound desc = could not find container \"ebee4367bfad252e5aac7e667f4a82e538a0ee3806799b65af5361fbe84e4669\": container with ID starting with ebee4367bfad252e5aac7e667f4a82e538a0ee3806799b65af5361fbe84e4669 not found: ID does not exist" Dec 05 01:25:11 crc kubenswrapper[4665]: I1205 01:25:11.906485 4665 scope.go:117] "RemoveContainer" containerID="c3db01a26cadd7345ae4aaa9401dd2a6bd64c03e3b1c80414aed7f9c3649d481" Dec 05 01:25:11 crc kubenswrapper[4665]: E1205 01:25:11.907011 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c3db01a26cadd7345ae4aaa9401dd2a6bd64c03e3b1c80414aed7f9c3649d481\": container with ID starting with c3db01a26cadd7345ae4aaa9401dd2a6bd64c03e3b1c80414aed7f9c3649d481 not found: ID does not exist" containerID="c3db01a26cadd7345ae4aaa9401dd2a6bd64c03e3b1c80414aed7f9c3649d481" Dec 05 01:25:11 crc kubenswrapper[4665]: I1205 01:25:11.907030 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c3db01a26cadd7345ae4aaa9401dd2a6bd64c03e3b1c80414aed7f9c3649d481"} err="failed to get container status \"c3db01a26cadd7345ae4aaa9401dd2a6bd64c03e3b1c80414aed7f9c3649d481\": rpc error: code = NotFound desc = could not find container \"c3db01a26cadd7345ae4aaa9401dd2a6bd64c03e3b1c80414aed7f9c3649d481\": container with ID starting with c3db01a26cadd7345ae4aaa9401dd2a6bd64c03e3b1c80414aed7f9c3649d481 not found: ID does not exist" Dec 05 01:25:11 crc kubenswrapper[4665]: I1205 01:25:11.907042 4665 scope.go:117] "RemoveContainer" containerID="877eeb0abb806ca64b014e8b978516ce81f4ab318869803ab78a0b213d8b4fc4" Dec 05 01:25:11 crc kubenswrapper[4665]: E1205 01:25:11.907389 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"877eeb0abb806ca64b014e8b978516ce81f4ab318869803ab78a0b213d8b4fc4\": container with ID starting with 877eeb0abb806ca64b014e8b978516ce81f4ab318869803ab78a0b213d8b4fc4 not found: ID does not exist" containerID="877eeb0abb806ca64b014e8b978516ce81f4ab318869803ab78a0b213d8b4fc4" Dec 05 01:25:11 crc kubenswrapper[4665]: I1205 01:25:11.907420 4665 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"877eeb0abb806ca64b014e8b978516ce81f4ab318869803ab78a0b213d8b4fc4"} err="failed to get container status \"877eeb0abb806ca64b014e8b978516ce81f4ab318869803ab78a0b213d8b4fc4\": rpc error: code = NotFound desc = could not find container \"877eeb0abb806ca64b014e8b978516ce81f4ab318869803ab78a0b213d8b4fc4\": container with ID starting with 877eeb0abb806ca64b014e8b978516ce81f4ab318869803ab78a0b213d8b4fc4 not found: ID does not exist" Dec 05 01:25:11 crc kubenswrapper[4665]: I1205 01:25:11.977857 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0d9247c-6408-4079-853c-25e65bc59cea-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 01:25:12 crc kubenswrapper[4665]: I1205 01:25:12.177205 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-q84b5"] Dec 05 01:25:12 crc kubenswrapper[4665]: I1205 01:25:12.185922 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-q84b5"] Dec 05 01:25:12 crc kubenswrapper[4665]: I1205 01:25:12.899416 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0d9247c-6408-4079-853c-25e65bc59cea" path="/var/lib/kubelet/pods/c0d9247c-6408-4079-853c-25e65bc59cea/volumes" Dec 05 01:25:14 crc kubenswrapper[4665]: I1205 01:25:14.812220 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-hgsvm"] Dec 05 01:25:14 crc kubenswrapper[4665]: E1205 01:25:14.812433 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0d9247c-6408-4079-853c-25e65bc59cea" containerName="extract-content" Dec 05 01:25:14 crc kubenswrapper[4665]: I1205 01:25:14.812446 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0d9247c-6408-4079-853c-25e65bc59cea" containerName="extract-content" Dec 05 01:25:14 crc kubenswrapper[4665]: E1205 01:25:14.812463 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0d9247c-6408-4079-853c-25e65bc59cea" containerName="registry-server" Dec 05 01:25:14 crc kubenswrapper[4665]: I1205 01:25:14.812470 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0d9247c-6408-4079-853c-25e65bc59cea" containerName="registry-server" Dec 05 01:25:14 crc kubenswrapper[4665]: E1205 01:25:14.812484 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0d9247c-6408-4079-853c-25e65bc59cea" containerName="extract-utilities" Dec 05 01:25:14 crc kubenswrapper[4665]: I1205 01:25:14.812491 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0d9247c-6408-4079-853c-25e65bc59cea" containerName="extract-utilities" Dec 05 01:25:14 crc kubenswrapper[4665]: I1205 01:25:14.812593 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0d9247c-6408-4079-853c-25e65bc59cea" containerName="registry-server" Dec 05 01:25:14 crc kubenswrapper[4665]: I1205 01:25:14.813150 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-hgsvm" Dec 05 01:25:14 crc kubenswrapper[4665]: I1205 01:25:14.815866 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-q9b9l" Dec 05 01:25:14 crc kubenswrapper[4665]: I1205 01:25:14.821605 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-jgwh9"] Dec 05 01:25:14 crc kubenswrapper[4665]: I1205 01:25:14.822316 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-jgwh9" Dec 05 01:25:14 crc kubenswrapper[4665]: W1205 01:25:14.823368 4665 reflector.go:561] object-"openshift-nmstate"/"openshift-nmstate-webhook": failed to list *v1.Secret: secrets "openshift-nmstate-webhook" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-nmstate": no relationship found between node 'crc' and this object Dec 05 01:25:14 crc kubenswrapper[4665]: E1205 01:25:14.823401 4665 reflector.go:158] "Unhandled Error" err="object-\"openshift-nmstate\"/\"openshift-nmstate-webhook\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openshift-nmstate-webhook\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-nmstate\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 01:25:14 crc kubenswrapper[4665]: I1205 01:25:14.842073 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-hgsvm"] Dec 05 01:25:14 crc kubenswrapper[4665]: I1205 01:25:14.844967 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-jgwh9"] Dec 05 01:25:14 crc kubenswrapper[4665]: I1205 01:25:14.888217 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-glrwt"] Dec 05 01:25:14 crc kubenswrapper[4665]: I1205 01:25:14.888953 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-glrwt" Dec 05 01:25:14 crc kubenswrapper[4665]: I1205 01:25:14.915367 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e3ec97f0-b128-4247-aa81-c51298bd148c-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-jgwh9\" (UID: \"e3ec97f0-b128-4247-aa81-c51298bd148c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-jgwh9" Dec 05 01:25:14 crc kubenswrapper[4665]: I1205 01:25:14.915419 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptpqq\" (UniqueName: \"kubernetes.io/projected/e3ec97f0-b128-4247-aa81-c51298bd148c-kube-api-access-ptpqq\") pod \"nmstate-webhook-5f6d4c5ccb-jgwh9\" (UID: \"e3ec97f0-b128-4247-aa81-c51298bd148c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-jgwh9" Dec 05 01:25:14 crc kubenswrapper[4665]: I1205 01:25:14.915443 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wftrj\" (UniqueName: \"kubernetes.io/projected/0e886560-4686-45f7-b50d-c0544fc53448-kube-api-access-wftrj\") pod \"nmstate-metrics-7f946cbc9-hgsvm\" (UID: \"0e886560-4686-45f7-b50d-c0544fc53448\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-hgsvm" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.016225 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/faa11fdb-cf1b-48a0-87b0-d40430440a73-nmstate-lock\") pod \"nmstate-handler-glrwt\" (UID: \"faa11fdb-cf1b-48a0-87b0-d40430440a73\") " pod="openshift-nmstate/nmstate-handler-glrwt" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.016342 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljxbt\" (UniqueName: \"kubernetes.io/projected/faa11fdb-cf1b-48a0-87b0-d40430440a73-kube-api-access-ljxbt\") pod \"nmstate-handler-glrwt\" (UID: \"faa11fdb-cf1b-48a0-87b0-d40430440a73\") " pod="openshift-nmstate/nmstate-handler-glrwt" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.016395 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/faa11fdb-cf1b-48a0-87b0-d40430440a73-dbus-socket\") pod \"nmstate-handler-glrwt\" (UID: \"faa11fdb-cf1b-48a0-87b0-d40430440a73\") " pod="openshift-nmstate/nmstate-handler-glrwt" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.016445 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/faa11fdb-cf1b-48a0-87b0-d40430440a73-ovs-socket\") pod \"nmstate-handler-glrwt\" (UID: \"faa11fdb-cf1b-48a0-87b0-d40430440a73\") " pod="openshift-nmstate/nmstate-handler-glrwt" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.016485 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e3ec97f0-b128-4247-aa81-c51298bd148c-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-jgwh9\" (UID: \"e3ec97f0-b128-4247-aa81-c51298bd148c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-jgwh9" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.016522 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptpqq\" (UniqueName: 
\"kubernetes.io/projected/e3ec97f0-b128-4247-aa81-c51298bd148c-kube-api-access-ptpqq\") pod \"nmstate-webhook-5f6d4c5ccb-jgwh9\" (UID: \"e3ec97f0-b128-4247-aa81-c51298bd148c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-jgwh9" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.016545 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wftrj\" (UniqueName: \"kubernetes.io/projected/0e886560-4686-45f7-b50d-c0544fc53448-kube-api-access-wftrj\") pod \"nmstate-metrics-7f946cbc9-hgsvm\" (UID: \"0e886560-4686-45f7-b50d-c0544fc53448\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-hgsvm" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.018544 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dp69l"] Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.019147 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dp69l" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.020541 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.021016 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.021663 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-lt7kf" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.048345 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptpqq\" (UniqueName: \"kubernetes.io/projected/e3ec97f0-b128-4247-aa81-c51298bd148c-kube-api-access-ptpqq\") pod \"nmstate-webhook-5f6d4c5ccb-jgwh9\" (UID: \"e3ec97f0-b128-4247-aa81-c51298bd148c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-jgwh9" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.051628 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dp69l"] Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.076390 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wftrj\" (UniqueName: \"kubernetes.io/projected/0e886560-4686-45f7-b50d-c0544fc53448-kube-api-access-wftrj\") pod \"nmstate-metrics-7f946cbc9-hgsvm\" (UID: \"0e886560-4686-45f7-b50d-c0544fc53448\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-hgsvm" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.117262 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/faa11fdb-cf1b-48a0-87b0-d40430440a73-nmstate-lock\") pod \"nmstate-handler-glrwt\" (UID: \"faa11fdb-cf1b-48a0-87b0-d40430440a73\") " pod="openshift-nmstate/nmstate-handler-glrwt" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.117365 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/0c556144-28be-4719-91ae-78d016ea7d7a-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-dp69l\" (UID: \"0c556144-28be-4719-91ae-78d016ea7d7a\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dp69l" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.117393 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljxbt\" 
(UniqueName: \"kubernetes.io/projected/faa11fdb-cf1b-48a0-87b0-d40430440a73-kube-api-access-ljxbt\") pod \"nmstate-handler-glrwt\" (UID: \"faa11fdb-cf1b-48a0-87b0-d40430440a73\") " pod="openshift-nmstate/nmstate-handler-glrwt" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.117427 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/faa11fdb-cf1b-48a0-87b0-d40430440a73-dbus-socket\") pod \"nmstate-handler-glrwt\" (UID: \"faa11fdb-cf1b-48a0-87b0-d40430440a73\") " pod="openshift-nmstate/nmstate-handler-glrwt" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.117450 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/faa11fdb-cf1b-48a0-87b0-d40430440a73-ovs-socket\") pod \"nmstate-handler-glrwt\" (UID: \"faa11fdb-cf1b-48a0-87b0-d40430440a73\") " pod="openshift-nmstate/nmstate-handler-glrwt" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.117487 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/0c556144-28be-4719-91ae-78d016ea7d7a-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-dp69l\" (UID: \"0c556144-28be-4719-91ae-78d016ea7d7a\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dp69l" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.117517 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n898p\" (UniqueName: \"kubernetes.io/projected/0c556144-28be-4719-91ae-78d016ea7d7a-kube-api-access-n898p\") pod \"nmstate-console-plugin-7fbb5f6569-dp69l\" (UID: \"0c556144-28be-4719-91ae-78d016ea7d7a\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dp69l" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.117603 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/faa11fdb-cf1b-48a0-87b0-d40430440a73-nmstate-lock\") pod \"nmstate-handler-glrwt\" (UID: \"faa11fdb-cf1b-48a0-87b0-d40430440a73\") " pod="openshift-nmstate/nmstate-handler-glrwt" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.117904 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/faa11fdb-cf1b-48a0-87b0-d40430440a73-ovs-socket\") pod \"nmstate-handler-glrwt\" (UID: \"faa11fdb-cf1b-48a0-87b0-d40430440a73\") " pod="openshift-nmstate/nmstate-handler-glrwt" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.118118 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/faa11fdb-cf1b-48a0-87b0-d40430440a73-dbus-socket\") pod \"nmstate-handler-glrwt\" (UID: \"faa11fdb-cf1b-48a0-87b0-d40430440a73\") " pod="openshift-nmstate/nmstate-handler-glrwt" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.156646 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljxbt\" (UniqueName: \"kubernetes.io/projected/faa11fdb-cf1b-48a0-87b0-d40430440a73-kube-api-access-ljxbt\") pod \"nmstate-handler-glrwt\" (UID: \"faa11fdb-cf1b-48a0-87b0-d40430440a73\") " pod="openshift-nmstate/nmstate-handler-glrwt" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.178987 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-hgsvm" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.218613 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/0c556144-28be-4719-91ae-78d016ea7d7a-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-dp69l\" (UID: \"0c556144-28be-4719-91ae-78d016ea7d7a\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dp69l" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.218678 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n898p\" (UniqueName: \"kubernetes.io/projected/0c556144-28be-4719-91ae-78d016ea7d7a-kube-api-access-n898p\") pod \"nmstate-console-plugin-7fbb5f6569-dp69l\" (UID: \"0c556144-28be-4719-91ae-78d016ea7d7a\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dp69l" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.218715 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/0c556144-28be-4719-91ae-78d016ea7d7a-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-dp69l\" (UID: \"0c556144-28be-4719-91ae-78d016ea7d7a\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dp69l" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.220072 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/0c556144-28be-4719-91ae-78d016ea7d7a-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-dp69l\" (UID: \"0c556144-28be-4719-91ae-78d016ea7d7a\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dp69l" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.220851 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-glrwt" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.228742 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/0c556144-28be-4719-91ae-78d016ea7d7a-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-dp69l\" (UID: \"0c556144-28be-4719-91ae-78d016ea7d7a\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dp69l" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.244472 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n898p\" (UniqueName: \"kubernetes.io/projected/0c556144-28be-4719-91ae-78d016ea7d7a-kube-api-access-n898p\") pod \"nmstate-console-plugin-7fbb5f6569-dp69l\" (UID: \"0c556144-28be-4719-91ae-78d016ea7d7a\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dp69l" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.287384 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-758c8fb5b-97nc7"] Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.287999 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-758c8fb5b-97nc7" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.303170 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-758c8fb5b-97nc7"] Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.333228 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dp69l" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.422868 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f057defc-69d7-4121-92d6-ef0537848dc1-service-ca\") pod \"console-758c8fb5b-97nc7\" (UID: \"f057defc-69d7-4121-92d6-ef0537848dc1\") " pod="openshift-console/console-758c8fb5b-97nc7" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.423241 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f057defc-69d7-4121-92d6-ef0537848dc1-trusted-ca-bundle\") pod \"console-758c8fb5b-97nc7\" (UID: \"f057defc-69d7-4121-92d6-ef0537848dc1\") " pod="openshift-console/console-758c8fb5b-97nc7" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.423278 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f057defc-69d7-4121-92d6-ef0537848dc1-console-config\") pod \"console-758c8fb5b-97nc7\" (UID: \"f057defc-69d7-4121-92d6-ef0537848dc1\") " pod="openshift-console/console-758c8fb5b-97nc7" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.423339 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f057defc-69d7-4121-92d6-ef0537848dc1-oauth-serving-cert\") pod \"console-758c8fb5b-97nc7\" (UID: \"f057defc-69d7-4121-92d6-ef0537848dc1\") " pod="openshift-console/console-758c8fb5b-97nc7" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.423366 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f057defc-69d7-4121-92d6-ef0537848dc1-console-serving-cert\") pod \"console-758c8fb5b-97nc7\" (UID: \"f057defc-69d7-4121-92d6-ef0537848dc1\") " pod="openshift-console/console-758c8fb5b-97nc7" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.423393 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f057defc-69d7-4121-92d6-ef0537848dc1-console-oauth-config\") pod \"console-758c8fb5b-97nc7\" (UID: \"f057defc-69d7-4121-92d6-ef0537848dc1\") " pod="openshift-console/console-758c8fb5b-97nc7" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.423432 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2b6pn\" (UniqueName: \"kubernetes.io/projected/f057defc-69d7-4121-92d6-ef0537848dc1-kube-api-access-2b6pn\") pod \"console-758c8fb5b-97nc7\" (UID: \"f057defc-69d7-4121-92d6-ef0537848dc1\") " pod="openshift-console/console-758c8fb5b-97nc7" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.524155 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f057defc-69d7-4121-92d6-ef0537848dc1-service-ca\") pod \"console-758c8fb5b-97nc7\" (UID: \"f057defc-69d7-4121-92d6-ef0537848dc1\") " pod="openshift-console/console-758c8fb5b-97nc7" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.524210 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/f057defc-69d7-4121-92d6-ef0537848dc1-trusted-ca-bundle\") pod \"console-758c8fb5b-97nc7\" (UID: \"f057defc-69d7-4121-92d6-ef0537848dc1\") " pod="openshift-console/console-758c8fb5b-97nc7" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.524234 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f057defc-69d7-4121-92d6-ef0537848dc1-console-config\") pod \"console-758c8fb5b-97nc7\" (UID: \"f057defc-69d7-4121-92d6-ef0537848dc1\") " pod="openshift-console/console-758c8fb5b-97nc7" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.524249 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/f057defc-69d7-4121-92d6-ef0537848dc1-oauth-serving-cert\") pod \"console-758c8fb5b-97nc7\" (UID: \"f057defc-69d7-4121-92d6-ef0537848dc1\") " pod="openshift-console/console-758c8fb5b-97nc7" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.524271 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f057defc-69d7-4121-92d6-ef0537848dc1-console-serving-cert\") pod \"console-758c8fb5b-97nc7\" (UID: \"f057defc-69d7-4121-92d6-ef0537848dc1\") " pod="openshift-console/console-758c8fb5b-97nc7" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.524317 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f057defc-69d7-4121-92d6-ef0537848dc1-console-oauth-config\") pod \"console-758c8fb5b-97nc7\" (UID: \"f057defc-69d7-4121-92d6-ef0537848dc1\") " pod="openshift-console/console-758c8fb5b-97nc7" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.524355 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2b6pn\" (UniqueName: \"kubernetes.io/projected/f057defc-69d7-4121-92d6-ef0537848dc1-kube-api-access-2b6pn\") pod \"console-758c8fb5b-97nc7\" (UID: \"f057defc-69d7-4121-92d6-ef0537848dc1\") " pod="openshift-console/console-758c8fb5b-97nc7" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.526520 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/f057defc-69d7-4121-92d6-ef0537848dc1-service-ca\") pod \"console-758c8fb5b-97nc7\" (UID: \"f057defc-69d7-4121-92d6-ef0537848dc1\") " pod="openshift-console/console-758c8fb5b-97nc7" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.527666 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/f057defc-69d7-4121-92d6-ef0537848dc1-console-config\") pod \"console-758c8fb5b-97nc7\" (UID: \"f057defc-69d7-4121-92d6-ef0537848dc1\") " pod="openshift-console/console-758c8fb5b-97nc7" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.527814 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/f057defc-69d7-4121-92d6-ef0537848dc1-trusted-ca-bundle\") pod \"console-758c8fb5b-97nc7\" (UID: \"f057defc-69d7-4121-92d6-ef0537848dc1\") " pod="openshift-console/console-758c8fb5b-97nc7" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.530248 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: 
\"kubernetes.io/configmap/f057defc-69d7-4121-92d6-ef0537848dc1-oauth-serving-cert\") pod \"console-758c8fb5b-97nc7\" (UID: \"f057defc-69d7-4121-92d6-ef0537848dc1\") " pod="openshift-console/console-758c8fb5b-97nc7" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.532634 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/f057defc-69d7-4121-92d6-ef0537848dc1-console-oauth-config\") pod \"console-758c8fb5b-97nc7\" (UID: \"f057defc-69d7-4121-92d6-ef0537848dc1\") " pod="openshift-console/console-758c8fb5b-97nc7" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.534571 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/f057defc-69d7-4121-92d6-ef0537848dc1-console-serving-cert\") pod \"console-758c8fb5b-97nc7\" (UID: \"f057defc-69d7-4121-92d6-ef0537848dc1\") " pod="openshift-console/console-758c8fb5b-97nc7" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.546957 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2b6pn\" (UniqueName: \"kubernetes.io/projected/f057defc-69d7-4121-92d6-ef0537848dc1-kube-api-access-2b6pn\") pod \"console-758c8fb5b-97nc7\" (UID: \"f057defc-69d7-4121-92d6-ef0537848dc1\") " pod="openshift-console/console-758c8fb5b-97nc7" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.582441 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dp69l"] Dec 05 01:25:15 crc kubenswrapper[4665]: W1205 01:25:15.587435 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c556144_28be_4719_91ae_78d016ea7d7a.slice/crio-b548de68f5145ca50e7906626d1c408723d5a2c5f3473445a45bae1c8a69f6c0 WatchSource:0}: Error finding container b548de68f5145ca50e7906626d1c408723d5a2c5f3473445a45bae1c8a69f6c0: Status 404 returned error can't find the container with id b548de68f5145ca50e7906626d1c408723d5a2c5f3473445a45bae1c8a69f6c0 Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.603334 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-758c8fb5b-97nc7" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.763632 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-hgsvm"] Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.809452 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-758c8fb5b-97nc7"] Dec 05 01:25:15 crc kubenswrapper[4665]: W1205 01:25:15.816062 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf057defc_69d7_4121_92d6_ef0537848dc1.slice/crio-1ca6102549813c3cca3cd06d89f23fc9b2a0478cce38bdc113c4feb3ee28bb41 WatchSource:0}: Error finding container 1ca6102549813c3cca3cd06d89f23fc9b2a0478cce38bdc113c4feb3ee28bb41: Status 404 returned error can't find the container with id 1ca6102549813c3cca3cd06d89f23fc9b2a0478cce38bdc113c4feb3ee28bb41 Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.888385 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-hgsvm" event={"ID":"0e886560-4686-45f7-b50d-c0544fc53448","Type":"ContainerStarted","Data":"706d82df349649f3394c9e387db107b3a98ca7f4727a776aa61975ce0b01481c"} Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.889569 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dp69l" event={"ID":"0c556144-28be-4719-91ae-78d016ea7d7a","Type":"ContainerStarted","Data":"b548de68f5145ca50e7906626d1c408723d5a2c5f3473445a45bae1c8a69f6c0"} Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.891497 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-glrwt" event={"ID":"faa11fdb-cf1b-48a0-87b0-d40430440a73","Type":"ContainerStarted","Data":"6f60ba495df9194333ae6375caa192a8b5554c6de34e1edcee503eaa2670ac4e"} Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.894023 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-758c8fb5b-97nc7" event={"ID":"f057defc-69d7-4121-92d6-ef0537848dc1","Type":"ContainerStarted","Data":"1ca6102549813c3cca3cd06d89f23fc9b2a0478cce38bdc113c4feb3ee28bb41"} Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.942923 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 05 01:25:15 crc kubenswrapper[4665]: I1205 01:25:15.954555 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/e3ec97f0-b128-4247-aa81-c51298bd148c-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-jgwh9\" (UID: \"e3ec97f0-b128-4247-aa81-c51298bd148c\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-jgwh9" Dec 05 01:25:16 crc kubenswrapper[4665]: I1205 01:25:16.087677 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-jgwh9"
Dec 05 01:25:16 crc kubenswrapper[4665]: I1205 01:25:16.463615 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-v7kmp"
Dec 05 01:25:16 crc kubenswrapper[4665]: I1205 01:25:16.464018 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-v7kmp"
Dec 05 01:25:16 crc kubenswrapper[4665]: I1205 01:25:16.531379 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-v7kmp"
Dec 05 01:25:16 crc kubenswrapper[4665]: I1205 01:25:16.584727 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-jgwh9"]
Dec 05 01:25:16 crc kubenswrapper[4665]: W1205 01:25:16.601408 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode3ec97f0_b128_4247_aa81_c51298bd148c.slice/crio-0f0a45e616fa0204100b3ed1fa296bc03a329b40ba2ac1d339295dfb16c6f66f WatchSource:0}: Error finding container 0f0a45e616fa0204100b3ed1fa296bc03a329b40ba2ac1d339295dfb16c6f66f: Status 404 returned error can't find the container with id 0f0a45e616fa0204100b3ed1fa296bc03a329b40ba2ac1d339295dfb16c6f66f
Dec 05 01:25:16 crc kubenswrapper[4665]: I1205 01:25:16.900671 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-jgwh9" event={"ID":"e3ec97f0-b128-4247-aa81-c51298bd148c","Type":"ContainerStarted","Data":"0f0a45e616fa0204100b3ed1fa296bc03a329b40ba2ac1d339295dfb16c6f66f"}
Dec 05 01:25:16 crc kubenswrapper[4665]: I1205 01:25:16.901868 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-758c8fb5b-97nc7" event={"ID":"f057defc-69d7-4121-92d6-ef0537848dc1","Type":"ContainerStarted","Data":"4e7e0d9c434b8596647ea5f96bc1dcac98a04e73c1b060416203f832569c83c6"}
Dec 05 01:25:16 crc kubenswrapper[4665]: I1205 01:25:16.924378 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-758c8fb5b-97nc7" podStartSLOduration=1.9243587720000002 podStartE2EDuration="1.924358772s" podCreationTimestamp="2025-12-05 01:25:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:25:16.919027321 +0000 UTC m=+892.258419630" watchObservedRunningTime="2025-12-05 01:25:16.924358772 +0000 UTC m=+892.263751071"
Dec 05 01:25:16 crc kubenswrapper[4665]: I1205 01:25:16.948373 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-v7kmp"
Dec 05 01:25:18 crc kubenswrapper[4665]: I1205 01:25:18.912316 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dp69l" event={"ID":"0c556144-28be-4719-91ae-78d016ea7d7a","Type":"ContainerStarted","Data":"22e6f2b62afb13d5d58534d541249c22113659cc9b9063104edae3be7dc576b2"}
Dec 05 01:25:18 crc kubenswrapper[4665]: I1205 01:25:18.918105 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-glrwt" event={"ID":"faa11fdb-cf1b-48a0-87b0-d40430440a73","Type":"ContainerStarted","Data":"96ebd0634ba1bb0f3f6262097ae4b6eed5495482349552b56527970a97cbd260"}
Dec 05 01:25:18 crc kubenswrapper[4665]: I1205 01:25:18.918170 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-glrwt"
Dec 05 01:25:18 crc kubenswrapper[4665]: I1205 01:25:18.920389 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-jgwh9" event={"ID":"e3ec97f0-b128-4247-aa81-c51298bd148c","Type":"ContainerStarted","Data":"d3dadddc0fac52cb37ddfcdd2068f1bd572769bfa46aa9987b2af5041646767b"}
Dec 05 01:25:18 crc kubenswrapper[4665]: I1205 01:25:18.920750 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-jgwh9"
Dec 05 01:25:18 crc kubenswrapper[4665]: I1205 01:25:18.921935 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-hgsvm" event={"ID":"0e886560-4686-45f7-b50d-c0544fc53448","Type":"ContainerStarted","Data":"edc8d6a921cacf9b02a56e423ceaa38d7bee5245d0dbffb2e9829535fc64611c"}
Dec 05 01:25:18 crc kubenswrapper[4665]: I1205 01:25:18.939673 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-v7kmp"]
Dec 05 01:25:18 crc kubenswrapper[4665]: I1205 01:25:18.941383 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-v7kmp" podUID="4308dc80-0ff6-4591-b5f2-09962e3ea32d" containerName="registry-server" containerID="cri-o://c6539d2218b1434e2bb878df1985accf286d1ca96c68c8cfda43d646d71ceedc" gracePeriod=2
Dec 05 01:25:18 crc kubenswrapper[4665]: I1205 01:25:18.957397 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-dp69l" podStartSLOduration=1.519492161 podStartE2EDuration="3.957376796s" podCreationTimestamp="2025-12-05 01:25:15 +0000 UTC" firstStartedPulling="2025-12-05 01:25:15.589780394 +0000 UTC m=+890.929172693" lastFinishedPulling="2025-12-05 01:25:18.027665029 +0000 UTC m=+893.367057328" observedRunningTime="2025-12-05 01:25:18.942682352 +0000 UTC m=+894.282074661" watchObservedRunningTime="2025-12-05 01:25:18.957376796 +0000 UTC m=+894.296769095"
Dec 05 01:25:18 crc kubenswrapper[4665]: I1205 01:25:18.970189 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-glrwt" podStartSLOduration=2.216410799 podStartE2EDuration="4.970169456s" podCreationTimestamp="2025-12-05 01:25:14 +0000 UTC" firstStartedPulling="2025-12-05 01:25:15.307212459 +0000 UTC m=+890.646604758" lastFinishedPulling="2025-12-05 01:25:18.060971126 +0000 UTC m=+893.400363415" observedRunningTime="2025-12-05 01:25:18.95668327 +0000 UTC m=+894.296075579" watchObservedRunningTime="2025-12-05 01:25:18.970169456 +0000 UTC m=+894.309561755"
Dec 05 01:25:18 crc kubenswrapper[4665]: I1205 01:25:18.983199 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-jgwh9" podStartSLOduration=3.55900354 podStartE2EDuration="4.983182672s" podCreationTimestamp="2025-12-05 01:25:14 +0000 UTC" firstStartedPulling="2025-12-05 01:25:16.605032082 +0000 UTC m=+891.944424381" lastFinishedPulling="2025-12-05 01:25:18.029211214 +0000 UTC m=+893.368603513" observedRunningTime="2025-12-05 01:25:18.978924655 +0000 UTC m=+894.318316954" watchObservedRunningTime="2025-12-05 01:25:18.983182672 +0000 UTC m=+894.322574971"
Dec 05 01:25:19 crc kubenswrapper[4665]: I1205 01:25:19.290339 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-v7kmp"
Dec 05 01:25:19 crc kubenswrapper[4665]: I1205 01:25:19.394038 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4308dc80-0ff6-4591-b5f2-09962e3ea32d-utilities\") pod \"4308dc80-0ff6-4591-b5f2-09962e3ea32d\" (UID: \"4308dc80-0ff6-4591-b5f2-09962e3ea32d\") "
Dec 05 01:25:19 crc kubenswrapper[4665]: I1205 01:25:19.394408 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vk22g\" (UniqueName: \"kubernetes.io/projected/4308dc80-0ff6-4591-b5f2-09962e3ea32d-kube-api-access-vk22g\") pod \"4308dc80-0ff6-4591-b5f2-09962e3ea32d\" (UID: \"4308dc80-0ff6-4591-b5f2-09962e3ea32d\") "
Dec 05 01:25:19 crc kubenswrapper[4665]: I1205 01:25:19.394442 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4308dc80-0ff6-4591-b5f2-09962e3ea32d-catalog-content\") pod \"4308dc80-0ff6-4591-b5f2-09962e3ea32d\" (UID: \"4308dc80-0ff6-4591-b5f2-09962e3ea32d\") "
Dec 05 01:25:19 crc kubenswrapper[4665]: I1205 01:25:19.395524 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4308dc80-0ff6-4591-b5f2-09962e3ea32d-utilities" (OuterVolumeSpecName: "utilities") pod "4308dc80-0ff6-4591-b5f2-09962e3ea32d" (UID: "4308dc80-0ff6-4591-b5f2-09962e3ea32d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 01:25:19 crc kubenswrapper[4665]: I1205 01:25:19.400009 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4308dc80-0ff6-4591-b5f2-09962e3ea32d-kube-api-access-vk22g" (OuterVolumeSpecName: "kube-api-access-vk22g") pod "4308dc80-0ff6-4591-b5f2-09962e3ea32d" (UID: "4308dc80-0ff6-4591-b5f2-09962e3ea32d"). InnerVolumeSpecName "kube-api-access-vk22g". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:25:19 crc kubenswrapper[4665]: I1205 01:25:19.419454 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4308dc80-0ff6-4591-b5f2-09962e3ea32d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4308dc80-0ff6-4591-b5f2-09962e3ea32d" (UID: "4308dc80-0ff6-4591-b5f2-09962e3ea32d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 01:25:19 crc kubenswrapper[4665]: I1205 01:25:19.496449 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4308dc80-0ff6-4591-b5f2-09962e3ea32d-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 01:25:19 crc kubenswrapper[4665]: I1205 01:25:19.496480 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vk22g\" (UniqueName: \"kubernetes.io/projected/4308dc80-0ff6-4591-b5f2-09962e3ea32d-kube-api-access-vk22g\") on node \"crc\" DevicePath \"\""
Dec 05 01:25:19 crc kubenswrapper[4665]: I1205 01:25:19.496494 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4308dc80-0ff6-4591-b5f2-09962e3ea32d-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 01:25:19 crc kubenswrapper[4665]: I1205 01:25:19.932825 4665 generic.go:334] "Generic (PLEG): container finished" podID="4308dc80-0ff6-4591-b5f2-09962e3ea32d" containerID="c6539d2218b1434e2bb878df1985accf286d1ca96c68c8cfda43d646d71ceedc" exitCode=0
Dec 05 01:25:19 crc kubenswrapper[4665]: I1205 01:25:19.933423 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v7kmp" event={"ID":"4308dc80-0ff6-4591-b5f2-09962e3ea32d","Type":"ContainerDied","Data":"c6539d2218b1434e2bb878df1985accf286d1ca96c68c8cfda43d646d71ceedc"}
Dec 05 01:25:19 crc kubenswrapper[4665]: I1205 01:25:19.933486 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-v7kmp"
Dec 05 01:25:19 crc kubenswrapper[4665]: I1205 01:25:19.933509 4665 scope.go:117] "RemoveContainer" containerID="c6539d2218b1434e2bb878df1985accf286d1ca96c68c8cfda43d646d71ceedc"
Dec 05 01:25:19 crc kubenswrapper[4665]: I1205 01:25:19.933490 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-v7kmp" event={"ID":"4308dc80-0ff6-4591-b5f2-09962e3ea32d","Type":"ContainerDied","Data":"e3d2f59de765fff3b95c5aaa39a85a26551858dd7493e874c9c0c39d0eb5c76d"}
Dec 05 01:25:19 crc kubenswrapper[4665]: I1205 01:25:19.971587 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-v7kmp"]
Dec 05 01:25:19 crc kubenswrapper[4665]: I1205 01:25:19.975605 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-v7kmp"]
Dec 05 01:25:20 crc kubenswrapper[4665]: I1205 01:25:20.325215 4665 scope.go:117] "RemoveContainer" containerID="69614e3353542691aa7d063b388b8d53da382fd2494e311cc1c4c0f02621e372"
Dec 05 01:25:20 crc kubenswrapper[4665]: I1205 01:25:20.340465 4665 scope.go:117] "RemoveContainer" containerID="45d9a93330c9852c1d5c060e292556746387093b42919b54cdec0d859b54484e"
Dec 05 01:25:20 crc kubenswrapper[4665]: I1205 01:25:20.380206 4665 scope.go:117] "RemoveContainer" containerID="c6539d2218b1434e2bb878df1985accf286d1ca96c68c8cfda43d646d71ceedc"
Dec 05 01:25:20 crc kubenswrapper[4665]: E1205 01:25:20.380763 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c6539d2218b1434e2bb878df1985accf286d1ca96c68c8cfda43d646d71ceedc\": container with ID starting with c6539d2218b1434e2bb878df1985accf286d1ca96c68c8cfda43d646d71ceedc not found: ID does not exist" containerID="c6539d2218b1434e2bb878df1985accf286d1ca96c68c8cfda43d646d71ceedc"
Dec 05 01:25:20 crc kubenswrapper[4665]: I1205 01:25:20.380818 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c6539d2218b1434e2bb878df1985accf286d1ca96c68c8cfda43d646d71ceedc"} err="failed to get container status \"c6539d2218b1434e2bb878df1985accf286d1ca96c68c8cfda43d646d71ceedc\": rpc error: code = NotFound desc = could not find container \"c6539d2218b1434e2bb878df1985accf286d1ca96c68c8cfda43d646d71ceedc\": container with ID starting with c6539d2218b1434e2bb878df1985accf286d1ca96c68c8cfda43d646d71ceedc not found: ID does not exist"
Dec 05 01:25:20 crc kubenswrapper[4665]: I1205 01:25:20.380844 4665 scope.go:117] "RemoveContainer" containerID="69614e3353542691aa7d063b388b8d53da382fd2494e311cc1c4c0f02621e372"
Dec 05 01:25:20 crc kubenswrapper[4665]: E1205 01:25:20.381193 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"69614e3353542691aa7d063b388b8d53da382fd2494e311cc1c4c0f02621e372\": container with ID starting with 69614e3353542691aa7d063b388b8d53da382fd2494e311cc1c4c0f02621e372 not found: ID does not exist" containerID="69614e3353542691aa7d063b388b8d53da382fd2494e311cc1c4c0f02621e372"
Dec 05 01:25:20 crc kubenswrapper[4665]: I1205 01:25:20.381222 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"69614e3353542691aa7d063b388b8d53da382fd2494e311cc1c4c0f02621e372"} err="failed to get container status \"69614e3353542691aa7d063b388b8d53da382fd2494e311cc1c4c0f02621e372\": rpc error: code = NotFound desc = could not find container \"69614e3353542691aa7d063b388b8d53da382fd2494e311cc1c4c0f02621e372\": container with ID starting with 69614e3353542691aa7d063b388b8d53da382fd2494e311cc1c4c0f02621e372 not found: ID does not exist"
Dec 05 01:25:20 crc kubenswrapper[4665]: I1205 01:25:20.381245 4665 scope.go:117] "RemoveContainer" containerID="45d9a93330c9852c1d5c060e292556746387093b42919b54cdec0d859b54484e"
Dec 05 01:25:20 crc kubenswrapper[4665]: E1205 01:25:20.381573 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"45d9a93330c9852c1d5c060e292556746387093b42919b54cdec0d859b54484e\": container with ID starting with 45d9a93330c9852c1d5c060e292556746387093b42919b54cdec0d859b54484e not found: ID does not exist" containerID="45d9a93330c9852c1d5c060e292556746387093b42919b54cdec0d859b54484e"
Dec 05 01:25:20 crc kubenswrapper[4665]: I1205 01:25:20.381613 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"45d9a93330c9852c1d5c060e292556746387093b42919b54cdec0d859b54484e"} err="failed to get container status \"45d9a93330c9852c1d5c060e292556746387093b42919b54cdec0d859b54484e\": rpc error: code = NotFound desc = could not find container \"45d9a93330c9852c1d5c060e292556746387093b42919b54cdec0d859b54484e\": container with ID starting with 45d9a93330c9852c1d5c060e292556746387093b42919b54cdec0d859b54484e not found: ID does not exist"
Dec 05 01:25:20 crc kubenswrapper[4665]: I1205 01:25:20.900596 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4308dc80-0ff6-4591-b5f2-09962e3ea32d" path="/var/lib/kubelet/pods/4308dc80-0ff6-4591-b5f2-09962e3ea32d/volumes"
Dec 05 01:25:20 crc kubenswrapper[4665]: I1205 01:25:20.940327 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-hgsvm" event={"ID":"0e886560-4686-45f7-b50d-c0544fc53448","Type":"ContainerStarted","Data":"3dcbd1c65fb4550000f0c3dbe34a544cd2deb015f55701e5b99604ddc3b08ffb"}
Dec 05 01:25:20 crc kubenswrapper[4665]: I1205 01:25:20.952749 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-hgsvm" podStartSLOduration=2.344054206 podStartE2EDuration="6.952738165s" podCreationTimestamp="2025-12-05 01:25:14 +0000 UTC" firstStartedPulling="2025-12-05 01:25:15.776678966 +0000 UTC m=+891.116071265" lastFinishedPulling="2025-12-05 01:25:20.385362925 +0000 UTC m=+895.724755224" observedRunningTime="2025-12-05 01:25:20.952425818 +0000 UTC m=+896.291818147" watchObservedRunningTime="2025-12-05 01:25:20.952738165 +0000 UTC m=+896.292130464"
Dec 05 01:25:22 crc kubenswrapper[4665]: I1205 01:25:22.929693 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vw2rd"]
Dec 05 01:25:22 crc kubenswrapper[4665]: E1205 01:25:22.930288 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4308dc80-0ff6-4591-b5f2-09962e3ea32d" containerName="registry-server"
Dec 05 01:25:22 crc kubenswrapper[4665]: I1205 01:25:22.930312 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="4308dc80-0ff6-4591-b5f2-09962e3ea32d" containerName="registry-server"
Dec 05 01:25:22 crc kubenswrapper[4665]: E1205 01:25:22.930348 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4308dc80-0ff6-4591-b5f2-09962e3ea32d" containerName="extract-content"
Dec 05 01:25:22 crc kubenswrapper[4665]: I1205 01:25:22.930357 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="4308dc80-0ff6-4591-b5f2-09962e3ea32d" containerName="extract-content"
Dec 05 01:25:22 crc kubenswrapper[4665]: E1205 01:25:22.930367 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4308dc80-0ff6-4591-b5f2-09962e3ea32d" containerName="extract-utilities"
Dec 05 01:25:22 crc kubenswrapper[4665]: I1205 01:25:22.930375 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="4308dc80-0ff6-4591-b5f2-09962e3ea32d" containerName="extract-utilities"
Dec 05 01:25:22 crc kubenswrapper[4665]: I1205 01:25:22.930504 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="4308dc80-0ff6-4591-b5f2-09962e3ea32d" containerName="registry-server"
Dec 05 01:25:22 crc kubenswrapper[4665]: I1205 01:25:22.931395 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vw2rd"
Dec 05 01:25:22 crc kubenswrapper[4665]: I1205 01:25:22.938897 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e55b36d5-884f-4e2e-b247-839c0a00c67f-utilities\") pod \"community-operators-vw2rd\" (UID: \"e55b36d5-884f-4e2e-b247-839c0a00c67f\") " pod="openshift-marketplace/community-operators-vw2rd"
Dec 05 01:25:22 crc kubenswrapper[4665]: I1205 01:25:22.939119 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-br85h\" (UniqueName: \"kubernetes.io/projected/e55b36d5-884f-4e2e-b247-839c0a00c67f-kube-api-access-br85h\") pod \"community-operators-vw2rd\" (UID: \"e55b36d5-884f-4e2e-b247-839c0a00c67f\") " pod="openshift-marketplace/community-operators-vw2rd"
Dec 05 01:25:22 crc kubenswrapper[4665]: I1205 01:25:22.939167 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e55b36d5-884f-4e2e-b247-839c0a00c67f-catalog-content\") pod \"community-operators-vw2rd\" (UID: \"e55b36d5-884f-4e2e-b247-839c0a00c67f\") " pod="openshift-marketplace/community-operators-vw2rd"
Dec 05 01:25:22 crc kubenswrapper[4665]: I1205 01:25:22.981152 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vw2rd"]
Dec 05 01:25:23 crc kubenswrapper[4665]: I1205 01:25:23.039949 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e55b36d5-884f-4e2e-b247-839c0a00c67f-utilities\") pod \"community-operators-vw2rd\" (UID: \"e55b36d5-884f-4e2e-b247-839c0a00c67f\") " pod="openshift-marketplace/community-operators-vw2rd"
Dec 05 01:25:23 crc kubenswrapper[4665]: I1205 01:25:23.040049 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-br85h\" (UniqueName: \"kubernetes.io/projected/e55b36d5-884f-4e2e-b247-839c0a00c67f-kube-api-access-br85h\") pod \"community-operators-vw2rd\" (UID: \"e55b36d5-884f-4e2e-b247-839c0a00c67f\") " pod="openshift-marketplace/community-operators-vw2rd"
Dec 05 01:25:23 crc kubenswrapper[4665]: I1205 01:25:23.040080 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e55b36d5-884f-4e2e-b247-839c0a00c67f-catalog-content\") pod \"community-operators-vw2rd\" (UID: \"e55b36d5-884f-4e2e-b247-839c0a00c67f\") " pod="openshift-marketplace/community-operators-vw2rd"
Dec 05 01:25:23 crc kubenswrapper[4665]: I1205 01:25:23.040486 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e55b36d5-884f-4e2e-b247-839c0a00c67f-utilities\") pod \"community-operators-vw2rd\" (UID: \"e55b36d5-884f-4e2e-b247-839c0a00c67f\") " pod="openshift-marketplace/community-operators-vw2rd"
Dec 05 01:25:23 crc kubenswrapper[4665]: I1205 01:25:23.040538 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e55b36d5-884f-4e2e-b247-839c0a00c67f-catalog-content\") pod \"community-operators-vw2rd\" (UID: \"e55b36d5-884f-4e2e-b247-839c0a00c67f\") " pod="openshift-marketplace/community-operators-vw2rd"
Dec 05 01:25:23 crc kubenswrapper[4665]: I1205 01:25:23.059697 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-br85h\" (UniqueName: \"kubernetes.io/projected/e55b36d5-884f-4e2e-b247-839c0a00c67f-kube-api-access-br85h\") pod \"community-operators-vw2rd\" (UID: \"e55b36d5-884f-4e2e-b247-839c0a00c67f\") " pod="openshift-marketplace/community-operators-vw2rd"
Dec 05 01:25:23 crc kubenswrapper[4665]: I1205 01:25:23.248685 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vw2rd"
Dec 05 01:25:23 crc kubenswrapper[4665]: I1205 01:25:23.701294 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vw2rd"]
Dec 05 01:25:23 crc kubenswrapper[4665]: I1205 01:25:23.973628 4665 generic.go:334] "Generic (PLEG): container finished" podID="e55b36d5-884f-4e2e-b247-839c0a00c67f" containerID="b087d742e7c24d2cfc63973af8e9d663681fed3ae2bc420d7bf6915ac58ca9f6" exitCode=0
Dec 05 01:25:23 crc kubenswrapper[4665]: I1205 01:25:23.973675 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vw2rd" event={"ID":"e55b36d5-884f-4e2e-b247-839c0a00c67f","Type":"ContainerDied","Data":"b087d742e7c24d2cfc63973af8e9d663681fed3ae2bc420d7bf6915ac58ca9f6"}
Dec 05 01:25:23 crc kubenswrapper[4665]: I1205 01:25:23.973706 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vw2rd" event={"ID":"e55b36d5-884f-4e2e-b247-839c0a00c67f","Type":"ContainerStarted","Data":"7c325759ebba22d5153644fb8ac93317db4859e21ba29f31f453a05e797be040"}
Dec 05 01:25:24 crc kubenswrapper[4665]: I1205 01:25:24.981644 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vw2rd" event={"ID":"e55b36d5-884f-4e2e-b247-839c0a00c67f","Type":"ContainerStarted","Data":"3a8841a654a4d583809f4be93af3152be392f548ad0202c2a7c1b87ab5e87a3d"}
Dec 05 01:25:25 crc kubenswrapper[4665]: I1205 01:25:25.243789 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-glrwt"
Dec 05 01:25:25 crc kubenswrapper[4665]: I1205 01:25:25.604030 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-758c8fb5b-97nc7"
Dec 05 01:25:25 crc kubenswrapper[4665]: I1205 01:25:25.604709 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-758c8fb5b-97nc7"
Dec 05 01:25:25 crc kubenswrapper[4665]: I1205 01:25:25.613103 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-758c8fb5b-97nc7"
Dec 05 01:25:25 crc kubenswrapper[4665]: I1205 01:25:25.993145 4665 generic.go:334] "Generic (PLEG): container finished" podID="e55b36d5-884f-4e2e-b247-839c0a00c67f" containerID="3a8841a654a4d583809f4be93af3152be392f548ad0202c2a7c1b87ab5e87a3d" exitCode=0
Dec 05 01:25:25 crc kubenswrapper[4665]: I1205 01:25:25.993237 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vw2rd" event={"ID":"e55b36d5-884f-4e2e-b247-839c0a00c67f","Type":"ContainerDied","Data":"3a8841a654a4d583809f4be93af3152be392f548ad0202c2a7c1b87ab5e87a3d"}
Dec 05 01:25:26 crc kubenswrapper[4665]: I1205 01:25:26.000743 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-758c8fb5b-97nc7"
Dec 05 01:25:26 crc kubenswrapper[4665]: I1205 01:25:26.070370 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-xv889"]
Dec 05 01:25:27 crc kubenswrapper[4665]: I1205 01:25:27.001088 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vw2rd" event={"ID":"e55b36d5-884f-4e2e-b247-839c0a00c67f","Type":"ContainerStarted","Data":"47911207e0864ddabfccdae64747f43203ae258cba9616e2c9789da32fe9438e"}
Dec 05 01:25:27 crc kubenswrapper[4665]: I1205 01:25:27.023243 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vw2rd" podStartSLOduration=2.4141933079999998 podStartE2EDuration="5.023224199s" podCreationTimestamp="2025-12-05 01:25:22 +0000 UTC" firstStartedPulling="2025-12-05 01:25:23.9751242 +0000 UTC m=+899.314516499" lastFinishedPulling="2025-12-05 01:25:26.584155091 +0000 UTC m=+901.923547390" observedRunningTime="2025-12-05 01:25:27.018379119 +0000 UTC m=+902.357771428" watchObservedRunningTime="2025-12-05 01:25:27.023224199 +0000 UTC m=+902.362616498"
Dec 05 01:25:33 crc kubenswrapper[4665]: I1205 01:25:33.249090 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vw2rd"
Dec 05 01:25:33 crc kubenswrapper[4665]: I1205 01:25:33.249512 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vw2rd"
Dec 05 01:25:33 crc kubenswrapper[4665]: I1205 01:25:33.284431 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vw2rd"
Dec 05 01:25:34 crc kubenswrapper[4665]: I1205 01:25:34.090127 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vw2rd"
Dec 05 01:25:36 crc kubenswrapper[4665]: I1205 01:25:36.094456 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-jgwh9"
Dec 05 01:25:36 crc kubenswrapper[4665]: I1205 01:25:36.321884 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vw2rd"]
Dec 05 01:25:36 crc kubenswrapper[4665]: I1205 01:25:36.322088 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vw2rd" podUID="e55b36d5-884f-4e2e-b247-839c0a00c67f" containerName="registry-server" containerID="cri-o://47911207e0864ddabfccdae64747f43203ae258cba9616e2c9789da32fe9438e" gracePeriod=2
Dec 05 01:25:37 crc kubenswrapper[4665]: I1205 01:25:37.137896 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mghht"]
Dec 05 01:25:37 crc kubenswrapper[4665]: I1205 01:25:37.140055 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mghht"
Dec 05 01:25:37 crc kubenswrapper[4665]: I1205 01:25:37.160624 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mghht"]
Dec 05 01:25:37 crc kubenswrapper[4665]: I1205 01:25:37.316892 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dae4641f-316a-425e-a3f9-b86210458245-utilities\") pod \"certified-operators-mghht\" (UID: \"dae4641f-316a-425e-a3f9-b86210458245\") " pod="openshift-marketplace/certified-operators-mghht"
Dec 05 01:25:37 crc kubenswrapper[4665]: I1205 01:25:37.316939 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jx946\" (UniqueName: \"kubernetes.io/projected/dae4641f-316a-425e-a3f9-b86210458245-kube-api-access-jx946\") pod \"certified-operators-mghht\" (UID: \"dae4641f-316a-425e-a3f9-b86210458245\") " pod="openshift-marketplace/certified-operators-mghht"
Dec 05 01:25:37 crc kubenswrapper[4665]: I1205 01:25:37.317040 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dae4641f-316a-425e-a3f9-b86210458245-catalog-content\") pod \"certified-operators-mghht\" (UID: \"dae4641f-316a-425e-a3f9-b86210458245\") " pod="openshift-marketplace/certified-operators-mghht"
Dec 05 01:25:37 crc kubenswrapper[4665]: I1205 01:25:37.418031 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dae4641f-316a-425e-a3f9-b86210458245-catalog-content\") pod \"certified-operators-mghht\" (UID: \"dae4641f-316a-425e-a3f9-b86210458245\") " pod="openshift-marketplace/certified-operators-mghht"
Dec 05 01:25:37 crc kubenswrapper[4665]: I1205 01:25:37.418132 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dae4641f-316a-425e-a3f9-b86210458245-utilities\") pod \"certified-operators-mghht\" (UID: \"dae4641f-316a-425e-a3f9-b86210458245\") " pod="openshift-marketplace/certified-operators-mghht"
Dec 05 01:25:37 crc kubenswrapper[4665]: I1205 01:25:37.418157 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jx946\" (UniqueName: \"kubernetes.io/projected/dae4641f-316a-425e-a3f9-b86210458245-kube-api-access-jx946\") pod \"certified-operators-mghht\" (UID: \"dae4641f-316a-425e-a3f9-b86210458245\") " pod="openshift-marketplace/certified-operators-mghht"
Dec 05 01:25:37 crc kubenswrapper[4665]: I1205 01:25:37.418737 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dae4641f-316a-425e-a3f9-b86210458245-utilities\") pod \"certified-operators-mghht\" (UID: \"dae4641f-316a-425e-a3f9-b86210458245\") " pod="openshift-marketplace/certified-operators-mghht"
Dec 05 01:25:37 crc kubenswrapper[4665]: I1205 01:25:37.418737 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dae4641f-316a-425e-a3f9-b86210458245-catalog-content\") pod \"certified-operators-mghht\" (UID: \"dae4641f-316a-425e-a3f9-b86210458245\") " pod="openshift-marketplace/certified-operators-mghht"
Dec 05 01:25:37 crc kubenswrapper[4665]: I1205 01:25:37.440976 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jx946\" (UniqueName: \"kubernetes.io/projected/dae4641f-316a-425e-a3f9-b86210458245-kube-api-access-jx946\") pod \"certified-operators-mghht\" (UID: \"dae4641f-316a-425e-a3f9-b86210458245\") " pod="openshift-marketplace/certified-operators-mghht"
Dec 05 01:25:37 crc kubenswrapper[4665]: I1205 01:25:37.493207 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mghht"
Dec 05 01:25:37 crc kubenswrapper[4665]: I1205 01:25:37.752275 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mghht"]
Dec 05 01:25:38 crc kubenswrapper[4665]: I1205 01:25:38.071791 4665 generic.go:334] "Generic (PLEG): container finished" podID="e55b36d5-884f-4e2e-b247-839c0a00c67f" containerID="47911207e0864ddabfccdae64747f43203ae258cba9616e2c9789da32fe9438e" exitCode=0
Dec 05 01:25:38 crc kubenswrapper[4665]: I1205 01:25:38.071847 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vw2rd" event={"ID":"e55b36d5-884f-4e2e-b247-839c0a00c67f","Type":"ContainerDied","Data":"47911207e0864ddabfccdae64747f43203ae258cba9616e2c9789da32fe9438e"}
Dec 05 01:25:38 crc kubenswrapper[4665]: I1205 01:25:38.073321 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mghht" event={"ID":"dae4641f-316a-425e-a3f9-b86210458245","Type":"ContainerStarted","Data":"05092c4359cf96802db5fa282ab035308b24d3964935a23184e4e68e67c444d7"}
Dec 05 01:25:38 crc kubenswrapper[4665]: I1205 01:25:38.073887 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mghht" event={"ID":"dae4641f-316a-425e-a3f9-b86210458245","Type":"ContainerStarted","Data":"6e9f8dd4b0a792e5a8ed1675a5e6b9625dda1a8d897c465aaf70a199cdb312da"}
Dec 05 01:25:38 crc kubenswrapper[4665]: I1205 01:25:38.539952 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vw2rd"
Dec 05 01:25:38 crc kubenswrapper[4665]: I1205 01:25:38.637742 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e55b36d5-884f-4e2e-b247-839c0a00c67f-catalog-content\") pod \"e55b36d5-884f-4e2e-b247-839c0a00c67f\" (UID: \"e55b36d5-884f-4e2e-b247-839c0a00c67f\") "
Dec 05 01:25:38 crc kubenswrapper[4665]: I1205 01:25:38.637816 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e55b36d5-884f-4e2e-b247-839c0a00c67f-utilities\") pod \"e55b36d5-884f-4e2e-b247-839c0a00c67f\" (UID: \"e55b36d5-884f-4e2e-b247-839c0a00c67f\") "
Dec 05 01:25:38 crc kubenswrapper[4665]: I1205 01:25:38.637843 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-br85h\" (UniqueName: \"kubernetes.io/projected/e55b36d5-884f-4e2e-b247-839c0a00c67f-kube-api-access-br85h\") pod \"e55b36d5-884f-4e2e-b247-839c0a00c67f\" (UID: \"e55b36d5-884f-4e2e-b247-839c0a00c67f\") "
Dec 05 01:25:38 crc kubenswrapper[4665]: I1205 01:25:38.639507 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e55b36d5-884f-4e2e-b247-839c0a00c67f-utilities" (OuterVolumeSpecName: "utilities") pod "e55b36d5-884f-4e2e-b247-839c0a00c67f" (UID: "e55b36d5-884f-4e2e-b247-839c0a00c67f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 01:25:38 crc kubenswrapper[4665]: I1205 01:25:38.644851 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e55b36d5-884f-4e2e-b247-839c0a00c67f-kube-api-access-br85h" (OuterVolumeSpecName: "kube-api-access-br85h") pod "e55b36d5-884f-4e2e-b247-839c0a00c67f" (UID: "e55b36d5-884f-4e2e-b247-839c0a00c67f"). InnerVolumeSpecName "kube-api-access-br85h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:25:38 crc kubenswrapper[4665]: I1205 01:25:38.688628 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e55b36d5-884f-4e2e-b247-839c0a00c67f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e55b36d5-884f-4e2e-b247-839c0a00c67f" (UID: "e55b36d5-884f-4e2e-b247-839c0a00c67f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 01:25:38 crc kubenswrapper[4665]: I1205 01:25:38.739531 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e55b36d5-884f-4e2e-b247-839c0a00c67f-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 01:25:38 crc kubenswrapper[4665]: I1205 01:25:38.739565 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-br85h\" (UniqueName: \"kubernetes.io/projected/e55b36d5-884f-4e2e-b247-839c0a00c67f-kube-api-access-br85h\") on node \"crc\" DevicePath \"\""
Dec 05 01:25:38 crc kubenswrapper[4665]: I1205 01:25:38.739576 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e55b36d5-884f-4e2e-b247-839c0a00c67f-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 01:25:39 crc kubenswrapper[4665]: I1205 01:25:39.368961 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vw2rd" event={"ID":"e55b36d5-884f-4e2e-b247-839c0a00c67f","Type":"ContainerDied","Data":"7c325759ebba22d5153644fb8ac93317db4859e21ba29f31f453a05e797be040"}
Dec 05 01:25:39 crc kubenswrapper[4665]: I1205 01:25:39.369014 4665 scope.go:117] "RemoveContainer" containerID="47911207e0864ddabfccdae64747f43203ae258cba9616e2c9789da32fe9438e"
Dec 05 01:25:39 crc kubenswrapper[4665]: I1205 01:25:39.369276 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vw2rd"
Dec 05 01:25:39 crc kubenswrapper[4665]: I1205 01:25:39.374262 4665 generic.go:334] "Generic (PLEG): container finished" podID="dae4641f-316a-425e-a3f9-b86210458245" containerID="05092c4359cf96802db5fa282ab035308b24d3964935a23184e4e68e67c444d7" exitCode=0
Dec 05 01:25:39 crc kubenswrapper[4665]: I1205 01:25:39.374321 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mghht" event={"ID":"dae4641f-316a-425e-a3f9-b86210458245","Type":"ContainerDied","Data":"05092c4359cf96802db5fa282ab035308b24d3964935a23184e4e68e67c444d7"}
Dec 05 01:25:39 crc kubenswrapper[4665]: I1205 01:25:39.396912 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vw2rd"]
Dec 05 01:25:39 crc kubenswrapper[4665]: I1205 01:25:39.404931 4665 scope.go:117] "RemoveContainer" containerID="3a8841a654a4d583809f4be93af3152be392f548ad0202c2a7c1b87ab5e87a3d"
Dec 05 01:25:39 crc kubenswrapper[4665]: I1205 01:25:39.405388 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vw2rd"]
Dec 05 01:25:39 crc kubenswrapper[4665]: I1205 01:25:39.426505 4665 scope.go:117] "RemoveContainer" containerID="b087d742e7c24d2cfc63973af8e9d663681fed3ae2bc420d7bf6915ac58ca9f6"
Dec 05 01:25:40 crc kubenswrapper[4665]: I1205 01:25:40.382092 4665 generic.go:334] "Generic (PLEG): container finished" podID="dae4641f-316a-425e-a3f9-b86210458245" containerID="8cc2eb12735036b9dedc5b9403b720caed15e4df25fdfb5cc39908ef05f2f715" exitCode=0
Dec 05 01:25:40 crc kubenswrapper[4665]: I1205 01:25:40.382167 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mghht" event={"ID":"dae4641f-316a-425e-a3f9-b86210458245","Type":"ContainerDied","Data":"8cc2eb12735036b9dedc5b9403b720caed15e4df25fdfb5cc39908ef05f2f715"}
Dec 05 01:25:40 crc kubenswrapper[4665]: I1205 01:25:40.900410 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e55b36d5-884f-4e2e-b247-839c0a00c67f" path="/var/lib/kubelet/pods/e55b36d5-884f-4e2e-b247-839c0a00c67f/volumes"
Dec 05 01:25:41 crc kubenswrapper[4665]: I1205 01:25:41.388628 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mghht" event={"ID":"dae4641f-316a-425e-a3f9-b86210458245","Type":"ContainerStarted","Data":"b58932e653dee89f6ab2cc68b3ef70f71184a1592ddb468029654c019c7f33ee"}
Dec 05 01:25:47 crc kubenswrapper[4665]: I1205 01:25:47.494143 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mghht"
Dec 05 01:25:47 crc kubenswrapper[4665]: I1205 01:25:47.494676 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mghht"
Dec 05 01:25:47 crc kubenswrapper[4665]: I1205 01:25:47.532336 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mghht"
Dec 05 01:25:47 crc kubenswrapper[4665]: I1205 01:25:47.549856 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mghht" podStartSLOduration=9.150437501 podStartE2EDuration="10.549838505s" podCreationTimestamp="2025-12-05 01:25:37 +0000 UTC" firstStartedPulling="2025-12-05 01:25:39.375878079 +0000 UTC m=+914.715270378" lastFinishedPulling="2025-12-05 01:25:40.775279083 +0000 UTC m=+916.114671382" observedRunningTime="2025-12-05 01:25:41.408176305 +0000 UTC m=+916.747568604" watchObservedRunningTime="2025-12-05 01:25:47.549838505 +0000 UTC m=+922.889230814"
Dec 05 01:25:48 crc kubenswrapper[4665]: I1205 01:25:48.516132 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mghht"
Dec 05 01:25:48 crc kubenswrapper[4665]: I1205 01:25:48.561929 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mghht"]
Dec 05 01:25:50 crc kubenswrapper[4665]: I1205 01:25:50.465354 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mghht" podUID="dae4641f-316a-425e-a3f9-b86210458245" containerName="registry-server" containerID="cri-o://b58932e653dee89f6ab2cc68b3ef70f71184a1592ddb468029654c019c7f33ee" gracePeriod=2
Dec 05 01:25:50 crc kubenswrapper[4665]: I1205 01:25:50.818512 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mghht"
Dec 05 01:25:50 crc kubenswrapper[4665]: I1205 01:25:50.907239 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dae4641f-316a-425e-a3f9-b86210458245-catalog-content\") pod \"dae4641f-316a-425e-a3f9-b86210458245\" (UID: \"dae4641f-316a-425e-a3f9-b86210458245\") "
Dec 05 01:25:50 crc kubenswrapper[4665]: I1205 01:25:50.907356 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jx946\" (UniqueName: \"kubernetes.io/projected/dae4641f-316a-425e-a3f9-b86210458245-kube-api-access-jx946\") pod \"dae4641f-316a-425e-a3f9-b86210458245\" (UID: \"dae4641f-316a-425e-a3f9-b86210458245\") "
Dec 05 01:25:50 crc kubenswrapper[4665]: I1205 01:25:50.907417 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dae4641f-316a-425e-a3f9-b86210458245-utilities\") pod \"dae4641f-316a-425e-a3f9-b86210458245\" (UID: \"dae4641f-316a-425e-a3f9-b86210458245\") "
Dec 05 01:25:50 crc kubenswrapper[4665]: I1205 01:25:50.908258 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dae4641f-316a-425e-a3f9-b86210458245-utilities" (OuterVolumeSpecName: "utilities") pod "dae4641f-316a-425e-a3f9-b86210458245" (UID: "dae4641f-316a-425e-a3f9-b86210458245"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 01:25:50 crc kubenswrapper[4665]: I1205 01:25:50.914123 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dae4641f-316a-425e-a3f9-b86210458245-kube-api-access-jx946" (OuterVolumeSpecName: "kube-api-access-jx946") pod "dae4641f-316a-425e-a3f9-b86210458245" (UID: "dae4641f-316a-425e-a3f9-b86210458245"). InnerVolumeSpecName "kube-api-access-jx946". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:25:50 crc kubenswrapper[4665]: I1205 01:25:50.955044 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/dae4641f-316a-425e-a3f9-b86210458245-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "dae4641f-316a-425e-a3f9-b86210458245" (UID: "dae4641f-316a-425e-a3f9-b86210458245"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.008830 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/dae4641f-316a-425e-a3f9-b86210458245-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.008870 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jx946\" (UniqueName: \"kubernetes.io/projected/dae4641f-316a-425e-a3f9-b86210458245-kube-api-access-jx946\") on node \"crc\" DevicePath \"\""
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.008885 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/dae4641f-316a-425e-a3f9-b86210458245-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.113152 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-xv889" podUID="b88c79b6-4760-4509-bee0-06de439c6ac2" containerName="console" containerID="cri-o://0c3ecb35060192752f35175a79fbd592e703e34a0b13e3e97baeb4d28e049cb1" gracePeriod=15
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.448554 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-xv889_b88c79b6-4760-4509-bee0-06de439c6ac2/console/0.log"
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.448889 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-xv889"
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.477602 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-xv889_b88c79b6-4760-4509-bee0-06de439c6ac2/console/0.log"
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.477657 4665 generic.go:334] "Generic (PLEG): container finished" podID="b88c79b6-4760-4509-bee0-06de439c6ac2" containerID="0c3ecb35060192752f35175a79fbd592e703e34a0b13e3e97baeb4d28e049cb1" exitCode=2
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.477755 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-xv889"
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.477999 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-xv889" event={"ID":"b88c79b6-4760-4509-bee0-06de439c6ac2","Type":"ContainerDied","Data":"0c3ecb35060192752f35175a79fbd592e703e34a0b13e3e97baeb4d28e049cb1"}
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.478043 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-xv889" event={"ID":"b88c79b6-4760-4509-bee0-06de439c6ac2","Type":"ContainerDied","Data":"c6884fd73e5ece5257883795af30ced87d03432db11f8ab572a25115635ee678"}
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.478061 4665 scope.go:117] "RemoveContainer" containerID="0c3ecb35060192752f35175a79fbd592e703e34a0b13e3e97baeb4d28e049cb1"
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.486397 4665 generic.go:334] "Generic (PLEG): container finished" podID="dae4641f-316a-425e-a3f9-b86210458245" containerID="b58932e653dee89f6ab2cc68b3ef70f71184a1592ddb468029654c019c7f33ee" exitCode=0
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.486436 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mghht"
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.486446 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mghht" event={"ID":"dae4641f-316a-425e-a3f9-b86210458245","Type":"ContainerDied","Data":"b58932e653dee89f6ab2cc68b3ef70f71184a1592ddb468029654c019c7f33ee"}
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.486475 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mghht" event={"ID":"dae4641f-316a-425e-a3f9-b86210458245","Type":"ContainerDied","Data":"6e9f8dd4b0a792e5a8ed1675a5e6b9625dda1a8d897c465aaf70a199cdb312da"}
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.500232 4665 scope.go:117] "RemoveContainer" containerID="0c3ecb35060192752f35175a79fbd592e703e34a0b13e3e97baeb4d28e049cb1"
Dec 05 01:25:51 crc kubenswrapper[4665]: E1205 01:25:51.500963 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c3ecb35060192752f35175a79fbd592e703e34a0b13e3e97baeb4d28e049cb1\": container with ID starting with 0c3ecb35060192752f35175a79fbd592e703e34a0b13e3e97baeb4d28e049cb1 not found: ID does not exist" containerID="0c3ecb35060192752f35175a79fbd592e703e34a0b13e3e97baeb4d28e049cb1"
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.500990 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c3ecb35060192752f35175a79fbd592e703e34a0b13e3e97baeb4d28e049cb1"} err="failed to get container status \"0c3ecb35060192752f35175a79fbd592e703e34a0b13e3e97baeb4d28e049cb1\": rpc error: code = NotFound desc = could not find container \"0c3ecb35060192752f35175a79fbd592e703e34a0b13e3e97baeb4d28e049cb1\": container with ID starting with 0c3ecb35060192752f35175a79fbd592e703e34a0b13e3e97baeb4d28e049cb1 not found: ID does not exist"
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.501013 4665 scope.go:117] "RemoveContainer" containerID="b58932e653dee89f6ab2cc68b3ef70f71184a1592ddb468029654c019c7f33ee"
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.518826 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mghht"]
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.524884 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mghht"]
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.530503 4665 scope.go:117] "RemoveContainer" containerID="8cc2eb12735036b9dedc5b9403b720caed15e4df25fdfb5cc39908ef05f2f715"
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.543998 4665 scope.go:117] "RemoveContainer" containerID="05092c4359cf96802db5fa282ab035308b24d3964935a23184e4e68e67c444d7"
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.556918 4665 scope.go:117] "RemoveContainer" containerID="b58932e653dee89f6ab2cc68b3ef70f71184a1592ddb468029654c019c7f33ee"
Dec 05 01:25:51 crc kubenswrapper[4665]: E1205 01:25:51.557364 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b58932e653dee89f6ab2cc68b3ef70f71184a1592ddb468029654c019c7f33ee\": container with ID starting with b58932e653dee89f6ab2cc68b3ef70f71184a1592ddb468029654c019c7f33ee not found: ID does not exist" containerID="b58932e653dee89f6ab2cc68b3ef70f71184a1592ddb468029654c019c7f33ee"
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.557396 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b58932e653dee89f6ab2cc68b3ef70f71184a1592ddb468029654c019c7f33ee"} err="failed to get container status \"b58932e653dee89f6ab2cc68b3ef70f71184a1592ddb468029654c019c7f33ee\": rpc error: code = NotFound desc = could not find container \"b58932e653dee89f6ab2cc68b3ef70f71184a1592ddb468029654c019c7f33ee\": container with ID starting with b58932e653dee89f6ab2cc68b3ef70f71184a1592ddb468029654c019c7f33ee not found: ID does not exist"
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.557417 4665 scope.go:117] "RemoveContainer" containerID="8cc2eb12735036b9dedc5b9403b720caed15e4df25fdfb5cc39908ef05f2f715"
Dec 05 01:25:51 crc kubenswrapper[4665]: E1205 01:25:51.557886 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8cc2eb12735036b9dedc5b9403b720caed15e4df25fdfb5cc39908ef05f2f715\": container with ID starting with 8cc2eb12735036b9dedc5b9403b720caed15e4df25fdfb5cc39908ef05f2f715 not found: ID does not exist" containerID="8cc2eb12735036b9dedc5b9403b720caed15e4df25fdfb5cc39908ef05f2f715"
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.557924 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cc2eb12735036b9dedc5b9403b720caed15e4df25fdfb5cc39908ef05f2f715"} err="failed to get container status \"8cc2eb12735036b9dedc5b9403b720caed15e4df25fdfb5cc39908ef05f2f715\": rpc error: code = NotFound desc = could not find container \"8cc2eb12735036b9dedc5b9403b720caed15e4df25fdfb5cc39908ef05f2f715\": container with ID starting with 8cc2eb12735036b9dedc5b9403b720caed15e4df25fdfb5cc39908ef05f2f715 not found: ID does not exist"
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.557952 4665 scope.go:117] "RemoveContainer" containerID="05092c4359cf96802db5fa282ab035308b24d3964935a23184e4e68e67c444d7"
Dec 05 01:25:51 crc kubenswrapper[4665]: E1205 01:25:51.558212 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"05092c4359cf96802db5fa282ab035308b24d3964935a23184e4e68e67c444d7\": container with ID starting with 05092c4359cf96802db5fa282ab035308b24d3964935a23184e4e68e67c444d7 not found: ID does not exist" containerID="05092c4359cf96802db5fa282ab035308b24d3964935a23184e4e68e67c444d7"
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.558243 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"05092c4359cf96802db5fa282ab035308b24d3964935a23184e4e68e67c444d7"} err="failed to get container status \"05092c4359cf96802db5fa282ab035308b24d3964935a23184e4e68e67c444d7\": rpc error: code = NotFound desc = could not find container \"05092c4359cf96802db5fa282ab035308b24d3964935a23184e4e68e67c444d7\": container with ID starting with 05092c4359cf96802db5fa282ab035308b24d3964935a23184e4e68e67c444d7 not found: ID does not exist"
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.616759 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b88c79b6-4760-4509-bee0-06de439c6ac2-console-serving-cert\") pod \"b88c79b6-4760-4509-bee0-06de439c6ac2\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") "
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.616811 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b88c79b6-4760-4509-bee0-06de439c6ac2-service-ca\") pod \"b88c79b6-4760-4509-bee0-06de439c6ac2\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") "
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.616856 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b88c79b6-4760-4509-bee0-06de439c6ac2-console-config\") pod \"b88c79b6-4760-4509-bee0-06de439c6ac2\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") "
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.616899 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pblps\" (UniqueName: \"kubernetes.io/projected/b88c79b6-4760-4509-bee0-06de439c6ac2-kube-api-access-pblps\") pod \"b88c79b6-4760-4509-bee0-06de439c6ac2\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") "
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.616920 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b88c79b6-4760-4509-bee0-06de439c6ac2-oauth-serving-cert\") pod \"b88c79b6-4760-4509-bee0-06de439c6ac2\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") "
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.616952 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b88c79b6-4760-4509-bee0-06de439c6ac2-trusted-ca-bundle\") pod \"b88c79b6-4760-4509-bee0-06de439c6ac2\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") "
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.617016 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b88c79b6-4760-4509-bee0-06de439c6ac2-console-oauth-config\") pod \"b88c79b6-4760-4509-bee0-06de439c6ac2\" (UID: \"b88c79b6-4760-4509-bee0-06de439c6ac2\") "
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.619058 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b88c79b6-4760-4509-bee0-06de439c6ac2-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "b88c79b6-4760-4509-bee0-06de439c6ac2" (UID: "b88c79b6-4760-4509-bee0-06de439c6ac2"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.619643 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b88c79b6-4760-4509-bee0-06de439c6ac2-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "b88c79b6-4760-4509-bee0-06de439c6ac2" (UID: "b88c79b6-4760-4509-bee0-06de439c6ac2"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.619675 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b88c79b6-4760-4509-bee0-06de439c6ac2-console-config" (OuterVolumeSpecName: "console-config") pod "b88c79b6-4760-4509-bee0-06de439c6ac2" (UID: "b88c79b6-4760-4509-bee0-06de439c6ac2"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.620312 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b88c79b6-4760-4509-bee0-06de439c6ac2-service-ca" (OuterVolumeSpecName: "service-ca") pod "b88c79b6-4760-4509-bee0-06de439c6ac2" (UID: "b88c79b6-4760-4509-bee0-06de439c6ac2"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.621738 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b88c79b6-4760-4509-bee0-06de439c6ac2-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "b88c79b6-4760-4509-bee0-06de439c6ac2" (UID: "b88c79b6-4760-4509-bee0-06de439c6ac2"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.623157 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b88c79b6-4760-4509-bee0-06de439c6ac2-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "b88c79b6-4760-4509-bee0-06de439c6ac2" (UID: "b88c79b6-4760-4509-bee0-06de439c6ac2"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.625617 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b88c79b6-4760-4509-bee0-06de439c6ac2-kube-api-access-pblps" (OuterVolumeSpecName: "kube-api-access-pblps") pod "b88c79b6-4760-4509-bee0-06de439c6ac2" (UID: "b88c79b6-4760-4509-bee0-06de439c6ac2"). InnerVolumeSpecName "kube-api-access-pblps". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.718862 4665 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/b88c79b6-4760-4509-bee0-06de439c6ac2-console-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.718907 4665 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/b88c79b6-4760-4509-bee0-06de439c6ac2-service-ca\") on node \"crc\" DevicePath \"\""
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.718923 4665 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/b88c79b6-4760-4509-bee0-06de439c6ac2-console-config\") on node \"crc\" DevicePath \"\""
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.718933 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pblps\" (UniqueName: \"kubernetes.io/projected/b88c79b6-4760-4509-bee0-06de439c6ac2-kube-api-access-pblps\") on node \"crc\" DevicePath \"\""
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.718946 4665 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/b88c79b6-4760-4509-bee0-06de439c6ac2-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.718954 4665 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b88c79b6-4760-4509-bee0-06de439c6ac2-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.718962 4665 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/b88c79b6-4760-4509-bee0-06de439c6ac2-console-oauth-config\") on node \"crc\" DevicePath \"\""
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.805808 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-xv889"]
Dec 05 01:25:51 crc kubenswrapper[4665]: I1205 01:25:51.809135 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-xv889"]
Dec 05 01:25:52 crc kubenswrapper[4665]: I1205 01:25:52.899700 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b88c79b6-4760-4509-bee0-06de439c6ac2" path="/var/lib/kubelet/pods/b88c79b6-4760-4509-bee0-06de439c6ac2/volumes"
Dec 05 01:25:52 crc kubenswrapper[4665]: I1205 01:25:52.900447 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dae4641f-316a-425e-a3f9-b86210458245" path="/var/lib/kubelet/pods/dae4641f-316a-425e-a3f9-b86210458245/volumes"
Dec 05 01:25:54 crc kubenswrapper[4665]: I1205 01:25:54.406094 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g"]
Dec 05 01:25:54 crc kubenswrapper[4665]: E1205 01:25:54.406321 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b88c79b6-4760-4509-bee0-06de439c6ac2" containerName="console"
Dec 05 01:25:54 crc kubenswrapper[4665]: I1205 01:25:54.406334 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="b88c79b6-4760-4509-bee0-06de439c6ac2" containerName="console"
Dec 05 01:25:54 crc kubenswrapper[4665]: E1205 01:25:54.406349 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dae4641f-316a-425e-a3f9-b86210458245" containerName="extract-content"
Dec 05 01:25:54 crc kubenswrapper[4665]: I1205 01:25:54.406355 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="dae4641f-316a-425e-a3f9-b86210458245" containerName="extract-content"
Dec 05 01:25:54 crc kubenswrapper[4665]: E1205 01:25:54.406361 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dae4641f-316a-425e-a3f9-b86210458245" containerName="registry-server"
Dec 05 01:25:54 crc kubenswrapper[4665]: I1205 01:25:54.406367 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="dae4641f-316a-425e-a3f9-b86210458245" containerName="registry-server"
Dec 05 01:25:54 crc kubenswrapper[4665]: E1205 01:25:54.406377 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dae4641f-316a-425e-a3f9-b86210458245" containerName="extract-utilities"
Dec 05 01:25:54 crc kubenswrapper[4665]: I1205 01:25:54.406383 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="dae4641f-316a-425e-a3f9-b86210458245" containerName="extract-utilities"
Dec 05 01:25:54 crc kubenswrapper[4665]: E1205 01:25:54.406393 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e55b36d5-884f-4e2e-b247-839c0a00c67f" containerName="extract-utilities"
Dec 05 01:25:54 crc kubenswrapper[4665]: I1205 01:25:54.406399 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="e55b36d5-884f-4e2e-b247-839c0a00c67f" containerName="extract-utilities"
Dec 05 01:25:54 crc kubenswrapper[4665]: E1205 01:25:54.406411 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e55b36d5-884f-4e2e-b247-839c0a00c67f" containerName="registry-server"
Dec 05 01:25:54 crc kubenswrapper[4665]: I1205 01:25:54.406417 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="e55b36d5-884f-4e2e-b247-839c0a00c67f" containerName="registry-server"
Dec 05 01:25:54 crc kubenswrapper[4665]: E1205 01:25:54.406424 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e55b36d5-884f-4e2e-b247-839c0a00c67f" containerName="extract-content"
Dec 05 01:25:54 crc kubenswrapper[4665]: I1205 01:25:54.406429 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="e55b36d5-884f-4e2e-b247-839c0a00c67f" containerName="extract-content"
Dec 05 01:25:54 crc kubenswrapper[4665]: I1205 01:25:54.406516 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="b88c79b6-4760-4509-bee0-06de439c6ac2" containerName="console"
Dec 05 01:25:54 crc kubenswrapper[4665]: I1205 01:25:54.406531 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="e55b36d5-884f-4e2e-b247-839c0a00c67f" containerName="registry-server"
Dec 05 01:25:54 crc kubenswrapper[4665]: I1205 01:25:54.406541 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="dae4641f-316a-425e-a3f9-b86210458245" containerName="registry-server"
Dec 05 01:25:54 crc kubenswrapper[4665]: I1205 01:25:54.407335 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g"
Dec 05 01:25:54 crc kubenswrapper[4665]: I1205 01:25:54.414525 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Dec 05 01:25:54 crc kubenswrapper[4665]: I1205 01:25:54.424337 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g"]
Dec 05 01:25:54 crc kubenswrapper[4665]: I1205 01:25:54.556142 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3dd46a4d-4cea-4ef4-8b4a-e78d451055a3-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g\" (UID: \"3dd46a4d-4cea-4ef4-8b4a-e78d451055a3\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g"
Dec 05 01:25:54 crc kubenswrapper[4665]: I1205 01:25:54.556236 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6cxx\" (UniqueName: \"kubernetes.io/projected/3dd46a4d-4cea-4ef4-8b4a-e78d451055a3-kube-api-access-p6cxx\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g\" (UID: \"3dd46a4d-4cea-4ef4-8b4a-e78d451055a3\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g"
Dec 05 01:25:54 crc kubenswrapper[4665]: I1205 01:25:54.556290 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3dd46a4d-4cea-4ef4-8b4a-e78d451055a3-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g\" (UID: \"3dd46a4d-4cea-4ef4-8b4a-e78d451055a3\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g"
Dec 05 01:25:54 crc kubenswrapper[4665]: I1205 01:25:54.657863 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6cxx\" (UniqueName: \"kubernetes.io/projected/3dd46a4d-4cea-4ef4-8b4a-e78d451055a3-kube-api-access-p6cxx\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g\" (UID: \"3dd46a4d-4cea-4ef4-8b4a-e78d451055a3\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g"
Dec 05 01:25:54 crc kubenswrapper[4665]: I1205 01:25:54.657932 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3dd46a4d-4cea-4ef4-8b4a-e78d451055a3-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g\" (UID: \"3dd46a4d-4cea-4ef4-8b4a-e78d451055a3\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g"
Dec 05 01:25:54 crc kubenswrapper[4665]: I1205 01:25:54.657962 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3dd46a4d-4cea-4ef4-8b4a-e78d451055a3-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g\" (UID: \"3dd46a4d-4cea-4ef4-8b4a-e78d451055a3\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g"
Dec 05 01:25:54 crc kubenswrapper[4665]: I1205 01:25:54.658361 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName:
\"kubernetes.io/empty-dir/3dd46a4d-4cea-4ef4-8b4a-e78d451055a3-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g\" (UID: \"3dd46a4d-4cea-4ef4-8b4a-e78d451055a3\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g" Dec 05 01:25:54 crc kubenswrapper[4665]: I1205 01:25:54.658824 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3dd46a4d-4cea-4ef4-8b4a-e78d451055a3-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g\" (UID: \"3dd46a4d-4cea-4ef4-8b4a-e78d451055a3\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g" Dec 05 01:25:54 crc kubenswrapper[4665]: I1205 01:25:54.676562 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6cxx\" (UniqueName: \"kubernetes.io/projected/3dd46a4d-4cea-4ef4-8b4a-e78d451055a3-kube-api-access-p6cxx\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g\" (UID: \"3dd46a4d-4cea-4ef4-8b4a-e78d451055a3\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g" Dec 05 01:25:54 crc kubenswrapper[4665]: I1205 01:25:54.722012 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g" Dec 05 01:25:55 crc kubenswrapper[4665]: I1205 01:25:55.137284 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g"] Dec 05 01:25:55 crc kubenswrapper[4665]: I1205 01:25:55.512493 4665 generic.go:334] "Generic (PLEG): container finished" podID="3dd46a4d-4cea-4ef4-8b4a-e78d451055a3" containerID="840a387eae95085a3fc7bb5b8f3576ba56cde498ae40a6de960e2eac4553b39a" exitCode=0 Dec 05 01:25:55 crc kubenswrapper[4665]: I1205 01:25:55.512563 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g" event={"ID":"3dd46a4d-4cea-4ef4-8b4a-e78d451055a3","Type":"ContainerDied","Data":"840a387eae95085a3fc7bb5b8f3576ba56cde498ae40a6de960e2eac4553b39a"} Dec 05 01:25:55 crc kubenswrapper[4665]: I1205 01:25:55.512878 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g" event={"ID":"3dd46a4d-4cea-4ef4-8b4a-e78d451055a3","Type":"ContainerStarted","Data":"ce657dcaa9f8e999da40beb3d2941f81a5799b3a1b1854c2eefa1aa5394f51f3"} Dec 05 01:25:56 crc kubenswrapper[4665]: I1205 01:25:56.518231 4665 generic.go:334] "Generic (PLEG): container finished" podID="3dd46a4d-4cea-4ef4-8b4a-e78d451055a3" containerID="30aa5bf93e5a72fdfa758ea1152c2de7203f241120fac5f4d356366bca38c653" exitCode=0 Dec 05 01:25:56 crc kubenswrapper[4665]: I1205 01:25:56.518279 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g" event={"ID":"3dd46a4d-4cea-4ef4-8b4a-e78d451055a3","Type":"ContainerDied","Data":"30aa5bf93e5a72fdfa758ea1152c2de7203f241120fac5f4d356366bca38c653"} Dec 05 01:25:57 crc kubenswrapper[4665]: I1205 01:25:57.526059 4665 generic.go:334] "Generic (PLEG): container finished" podID="3dd46a4d-4cea-4ef4-8b4a-e78d451055a3" containerID="b56d8c00e60a992635d2679ad4d8f22e4b024a88c04d41f2c61673070419bd7d" exitCode=0 Dec 05 01:25:57 crc kubenswrapper[4665]: I1205 
01:25:57.526444 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g" event={"ID":"3dd46a4d-4cea-4ef4-8b4a-e78d451055a3","Type":"ContainerDied","Data":"b56d8c00e60a992635d2679ad4d8f22e4b024a88c04d41f2c61673070419bd7d"} Dec 05 01:25:58 crc kubenswrapper[4665]: I1205 01:25:58.738807 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g" Dec 05 01:25:58 crc kubenswrapper[4665]: I1205 01:25:58.912346 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6cxx\" (UniqueName: \"kubernetes.io/projected/3dd46a4d-4cea-4ef4-8b4a-e78d451055a3-kube-api-access-p6cxx\") pod \"3dd46a4d-4cea-4ef4-8b4a-e78d451055a3\" (UID: \"3dd46a4d-4cea-4ef4-8b4a-e78d451055a3\") " Dec 05 01:25:58 crc kubenswrapper[4665]: I1205 01:25:58.912398 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3dd46a4d-4cea-4ef4-8b4a-e78d451055a3-bundle\") pod \"3dd46a4d-4cea-4ef4-8b4a-e78d451055a3\" (UID: \"3dd46a4d-4cea-4ef4-8b4a-e78d451055a3\") " Dec 05 01:25:58 crc kubenswrapper[4665]: I1205 01:25:58.912468 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3dd46a4d-4cea-4ef4-8b4a-e78d451055a3-util\") pod \"3dd46a4d-4cea-4ef4-8b4a-e78d451055a3\" (UID: \"3dd46a4d-4cea-4ef4-8b4a-e78d451055a3\") " Dec 05 01:25:58 crc kubenswrapper[4665]: I1205 01:25:58.913284 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3dd46a4d-4cea-4ef4-8b4a-e78d451055a3-bundle" (OuterVolumeSpecName: "bundle") pod "3dd46a4d-4cea-4ef4-8b4a-e78d451055a3" (UID: "3dd46a4d-4cea-4ef4-8b4a-e78d451055a3"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:25:58 crc kubenswrapper[4665]: I1205 01:25:58.917825 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3dd46a4d-4cea-4ef4-8b4a-e78d451055a3-kube-api-access-p6cxx" (OuterVolumeSpecName: "kube-api-access-p6cxx") pod "3dd46a4d-4cea-4ef4-8b4a-e78d451055a3" (UID: "3dd46a4d-4cea-4ef4-8b4a-e78d451055a3"). InnerVolumeSpecName "kube-api-access-p6cxx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:25:58 crc kubenswrapper[4665]: I1205 01:25:58.926151 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3dd46a4d-4cea-4ef4-8b4a-e78d451055a3-util" (OuterVolumeSpecName: "util") pod "3dd46a4d-4cea-4ef4-8b4a-e78d451055a3" (UID: "3dd46a4d-4cea-4ef4-8b4a-e78d451055a3"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:25:59 crc kubenswrapper[4665]: I1205 01:25:59.013892 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6cxx\" (UniqueName: \"kubernetes.io/projected/3dd46a4d-4cea-4ef4-8b4a-e78d451055a3-kube-api-access-p6cxx\") on node \"crc\" DevicePath \"\"" Dec 05 01:25:59 crc kubenswrapper[4665]: I1205 01:25:59.014153 4665 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3dd46a4d-4cea-4ef4-8b4a-e78d451055a3-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:25:59 crc kubenswrapper[4665]: I1205 01:25:59.014161 4665 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3dd46a4d-4cea-4ef4-8b4a-e78d451055a3-util\") on node \"crc\" DevicePath \"\"" Dec 05 01:25:59 crc kubenswrapper[4665]: I1205 01:25:59.539864 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g" event={"ID":"3dd46a4d-4cea-4ef4-8b4a-e78d451055a3","Type":"ContainerDied","Data":"ce657dcaa9f8e999da40beb3d2941f81a5799b3a1b1854c2eefa1aa5394f51f3"} Dec 05 01:25:59 crc kubenswrapper[4665]: I1205 01:25:59.539923 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ce657dcaa9f8e999da40beb3d2941f81a5799b3a1b1854c2eefa1aa5394f51f3" Dec 05 01:25:59 crc kubenswrapper[4665]: I1205 01:25:59.539978 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g" Dec 05 01:26:08 crc kubenswrapper[4665]: I1205 01:26:08.900858 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-84549bb967-wwjmg"] Dec 05 01:26:08 crc kubenswrapper[4665]: E1205 01:26:08.901440 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dd46a4d-4cea-4ef4-8b4a-e78d451055a3" containerName="extract" Dec 05 01:26:08 crc kubenswrapper[4665]: I1205 01:26:08.901457 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dd46a4d-4cea-4ef4-8b4a-e78d451055a3" containerName="extract" Dec 05 01:26:08 crc kubenswrapper[4665]: E1205 01:26:08.901470 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dd46a4d-4cea-4ef4-8b4a-e78d451055a3" containerName="util" Dec 05 01:26:08 crc kubenswrapper[4665]: I1205 01:26:08.901478 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dd46a4d-4cea-4ef4-8b4a-e78d451055a3" containerName="util" Dec 05 01:26:08 crc kubenswrapper[4665]: E1205 01:26:08.901490 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dd46a4d-4cea-4ef4-8b4a-e78d451055a3" containerName="pull" Dec 05 01:26:08 crc kubenswrapper[4665]: I1205 01:26:08.901499 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dd46a4d-4cea-4ef4-8b4a-e78d451055a3" containerName="pull" Dec 05 01:26:08 crc kubenswrapper[4665]: I1205 01:26:08.901599 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dd46a4d-4cea-4ef4-8b4a-e78d451055a3" containerName="extract" Dec 05 01:26:08 crc kubenswrapper[4665]: I1205 01:26:08.901974 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-84549bb967-wwjmg" Dec 05 01:26:08 crc kubenswrapper[4665]: I1205 01:26:08.903417 4665 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Dec 05 01:26:08 crc kubenswrapper[4665]: I1205 01:26:08.903856 4665 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Dec 05 01:26:08 crc kubenswrapper[4665]: I1205 01:26:08.908344 4665 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-m27pn" Dec 05 01:26:08 crc kubenswrapper[4665]: I1205 01:26:08.910514 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Dec 05 01:26:08 crc kubenswrapper[4665]: I1205 01:26:08.910621 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Dec 05 01:26:08 crc kubenswrapper[4665]: I1205 01:26:08.916714 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-84549bb967-wwjmg"] Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.033812 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c3dedfa3-52da-4bbc-b080-ce01610f9152-apiservice-cert\") pod \"metallb-operator-controller-manager-84549bb967-wwjmg\" (UID: \"c3dedfa3-52da-4bbc-b080-ce01610f9152\") " pod="metallb-system/metallb-operator-controller-manager-84549bb967-wwjmg" Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.033861 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c3dedfa3-52da-4bbc-b080-ce01610f9152-webhook-cert\") pod \"metallb-operator-controller-manager-84549bb967-wwjmg\" (UID: \"c3dedfa3-52da-4bbc-b080-ce01610f9152\") " pod="metallb-system/metallb-operator-controller-manager-84549bb967-wwjmg" Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.033982 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4rq5b\" (UniqueName: \"kubernetes.io/projected/c3dedfa3-52da-4bbc-b080-ce01610f9152-kube-api-access-4rq5b\") pod \"metallb-operator-controller-manager-84549bb967-wwjmg\" (UID: \"c3dedfa3-52da-4bbc-b080-ce01610f9152\") " pod="metallb-system/metallb-operator-controller-manager-84549bb967-wwjmg" Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.125060 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-6868996d58-rgvnq"] Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.125947 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6868996d58-rgvnq" Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.129104 4665 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.129491 4665 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.131141 4665 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-bhfzv" Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.135044 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c3dedfa3-52da-4bbc-b080-ce01610f9152-apiservice-cert\") pod \"metallb-operator-controller-manager-84549bb967-wwjmg\" (UID: \"c3dedfa3-52da-4bbc-b080-ce01610f9152\") " pod="metallb-system/metallb-operator-controller-manager-84549bb967-wwjmg" Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.135081 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c3dedfa3-52da-4bbc-b080-ce01610f9152-webhook-cert\") pod \"metallb-operator-controller-manager-84549bb967-wwjmg\" (UID: \"c3dedfa3-52da-4bbc-b080-ce01610f9152\") " pod="metallb-system/metallb-operator-controller-manager-84549bb967-wwjmg" Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.135118 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4rq5b\" (UniqueName: \"kubernetes.io/projected/c3dedfa3-52da-4bbc-b080-ce01610f9152-kube-api-access-4rq5b\") pod \"metallb-operator-controller-manager-84549bb967-wwjmg\" (UID: \"c3dedfa3-52da-4bbc-b080-ce01610f9152\") " pod="metallb-system/metallb-operator-controller-manager-84549bb967-wwjmg" Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.143928 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c3dedfa3-52da-4bbc-b080-ce01610f9152-webhook-cert\") pod \"metallb-operator-controller-manager-84549bb967-wwjmg\" (UID: \"c3dedfa3-52da-4bbc-b080-ce01610f9152\") " pod="metallb-system/metallb-operator-controller-manager-84549bb967-wwjmg" Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.144990 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c3dedfa3-52da-4bbc-b080-ce01610f9152-apiservice-cert\") pod \"metallb-operator-controller-manager-84549bb967-wwjmg\" (UID: \"c3dedfa3-52da-4bbc-b080-ce01610f9152\") " pod="metallb-system/metallb-operator-controller-manager-84549bb967-wwjmg" Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.154126 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4rq5b\" (UniqueName: \"kubernetes.io/projected/c3dedfa3-52da-4bbc-b080-ce01610f9152-kube-api-access-4rq5b\") pod \"metallb-operator-controller-manager-84549bb967-wwjmg\" (UID: \"c3dedfa3-52da-4bbc-b080-ce01610f9152\") " pod="metallb-system/metallb-operator-controller-manager-84549bb967-wwjmg" Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.189176 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6868996d58-rgvnq"] Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.216808 4665 util.go:30] "No 
sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-84549bb967-wwjmg" Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.236780 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/067086f8-f82e-45c4-a7dd-79cacf3192e5-apiservice-cert\") pod \"metallb-operator-webhook-server-6868996d58-rgvnq\" (UID: \"067086f8-f82e-45c4-a7dd-79cacf3192e5\") " pod="metallb-system/metallb-operator-webhook-server-6868996d58-rgvnq" Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.236844 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/067086f8-f82e-45c4-a7dd-79cacf3192e5-webhook-cert\") pod \"metallb-operator-webhook-server-6868996d58-rgvnq\" (UID: \"067086f8-f82e-45c4-a7dd-79cacf3192e5\") " pod="metallb-system/metallb-operator-webhook-server-6868996d58-rgvnq" Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.236889 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-slzdb\" (UniqueName: \"kubernetes.io/projected/067086f8-f82e-45c4-a7dd-79cacf3192e5-kube-api-access-slzdb\") pod \"metallb-operator-webhook-server-6868996d58-rgvnq\" (UID: \"067086f8-f82e-45c4-a7dd-79cacf3192e5\") " pod="metallb-system/metallb-operator-webhook-server-6868996d58-rgvnq" Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.337543 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/067086f8-f82e-45c4-a7dd-79cacf3192e5-apiservice-cert\") pod \"metallb-operator-webhook-server-6868996d58-rgvnq\" (UID: \"067086f8-f82e-45c4-a7dd-79cacf3192e5\") " pod="metallb-system/metallb-operator-webhook-server-6868996d58-rgvnq" Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.337586 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/067086f8-f82e-45c4-a7dd-79cacf3192e5-webhook-cert\") pod \"metallb-operator-webhook-server-6868996d58-rgvnq\" (UID: \"067086f8-f82e-45c4-a7dd-79cacf3192e5\") " pod="metallb-system/metallb-operator-webhook-server-6868996d58-rgvnq" Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.337603 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-slzdb\" (UniqueName: \"kubernetes.io/projected/067086f8-f82e-45c4-a7dd-79cacf3192e5-kube-api-access-slzdb\") pod \"metallb-operator-webhook-server-6868996d58-rgvnq\" (UID: \"067086f8-f82e-45c4-a7dd-79cacf3192e5\") " pod="metallb-system/metallb-operator-webhook-server-6868996d58-rgvnq" Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.340513 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/067086f8-f82e-45c4-a7dd-79cacf3192e5-apiservice-cert\") pod \"metallb-operator-webhook-server-6868996d58-rgvnq\" (UID: \"067086f8-f82e-45c4-a7dd-79cacf3192e5\") " pod="metallb-system/metallb-operator-webhook-server-6868996d58-rgvnq" Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.346899 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/067086f8-f82e-45c4-a7dd-79cacf3192e5-webhook-cert\") pod \"metallb-operator-webhook-server-6868996d58-rgvnq\" (UID: 
\"067086f8-f82e-45c4-a7dd-79cacf3192e5\") " pod="metallb-system/metallb-operator-webhook-server-6868996d58-rgvnq" Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.360191 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-slzdb\" (UniqueName: \"kubernetes.io/projected/067086f8-f82e-45c4-a7dd-79cacf3192e5-kube-api-access-slzdb\") pod \"metallb-operator-webhook-server-6868996d58-rgvnq\" (UID: \"067086f8-f82e-45c4-a7dd-79cacf3192e5\") " pod="metallb-system/metallb-operator-webhook-server-6868996d58-rgvnq" Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.443408 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-6868996d58-rgvnq" Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.785338 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-84549bb967-wwjmg"] Dec 05 01:26:09 crc kubenswrapper[4665]: W1205 01:26:09.797475 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc3dedfa3_52da_4bbc_b080_ce01610f9152.slice/crio-82914e94340470755bc206e5129f80640cdf41f6ba82189ae194aee939eefdd7 WatchSource:0}: Error finding container 82914e94340470755bc206e5129f80640cdf41f6ba82189ae194aee939eefdd7: Status 404 returned error can't find the container with id 82914e94340470755bc206e5129f80640cdf41f6ba82189ae194aee939eefdd7 Dec 05 01:26:09 crc kubenswrapper[4665]: I1205 01:26:09.845588 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-6868996d58-rgvnq"] Dec 05 01:26:10 crc kubenswrapper[4665]: I1205 01:26:10.605079 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6868996d58-rgvnq" event={"ID":"067086f8-f82e-45c4-a7dd-79cacf3192e5","Type":"ContainerStarted","Data":"7cc8a5828290dc0c59c3a148526adbcc7dd5b3750d6c6e36e6eab03c8d87b9e0"} Dec 05 01:26:10 crc kubenswrapper[4665]: I1205 01:26:10.606362 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-84549bb967-wwjmg" event={"ID":"c3dedfa3-52da-4bbc-b080-ce01610f9152","Type":"ContainerStarted","Data":"82914e94340470755bc206e5129f80640cdf41f6ba82189ae194aee939eefdd7"} Dec 05 01:26:16 crc kubenswrapper[4665]: I1205 01:26:16.650670 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-6868996d58-rgvnq" event={"ID":"067086f8-f82e-45c4-a7dd-79cacf3192e5","Type":"ContainerStarted","Data":"2ab3f1b7004fa31f7516e98733a3ccd7e4dbef93d5a348aac2e6c159bd0f3d48"} Dec 05 01:26:16 crc kubenswrapper[4665]: I1205 01:26:16.651126 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-6868996d58-rgvnq" Dec 05 01:26:16 crc kubenswrapper[4665]: I1205 01:26:16.652501 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-84549bb967-wwjmg" event={"ID":"c3dedfa3-52da-4bbc-b080-ce01610f9152","Type":"ContainerStarted","Data":"3bdd58d52b3d1238f0e8fdeb42128ae32c1928c63a6fba87b873b41876d3f72f"} Dec 05 01:26:16 crc kubenswrapper[4665]: I1205 01:26:16.652823 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-84549bb967-wwjmg" Dec 05 01:26:16 crc kubenswrapper[4665]: I1205 01:26:16.674582 4665 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-6868996d58-rgvnq" podStartSLOduration=1.577160532 podStartE2EDuration="7.674564081s" podCreationTimestamp="2025-12-05 01:26:09 +0000 UTC" firstStartedPulling="2025-12-05 01:26:09.857450237 +0000 UTC m=+945.196842536" lastFinishedPulling="2025-12-05 01:26:15.954853786 +0000 UTC m=+951.294246085" observedRunningTime="2025-12-05 01:26:16.668752698 +0000 UTC m=+952.008145017" watchObservedRunningTime="2025-12-05 01:26:16.674564081 +0000 UTC m=+952.013956390" Dec 05 01:26:16 crc kubenswrapper[4665]: I1205 01:26:16.689634 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-84549bb967-wwjmg" podStartSLOduration=2.562368244 podStartE2EDuration="8.689614495s" podCreationTimestamp="2025-12-05 01:26:08 +0000 UTC" firstStartedPulling="2025-12-05 01:26:09.810482754 +0000 UTC m=+945.149875043" lastFinishedPulling="2025-12-05 01:26:15.937728995 +0000 UTC m=+951.277121294" observedRunningTime="2025-12-05 01:26:16.688403627 +0000 UTC m=+952.027795966" watchObservedRunningTime="2025-12-05 01:26:16.689614495 +0000 UTC m=+952.029006804" Dec 05 01:26:29 crc kubenswrapper[4665]: I1205 01:26:29.451123 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-6868996d58-rgvnq" Dec 05 01:26:44 crc kubenswrapper[4665]: I1205 01:26:44.921963 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:26:44 crc kubenswrapper[4665]: I1205 01:26:44.922532 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:26:49 crc kubenswrapper[4665]: I1205 01:26:49.222876 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-84549bb967-wwjmg" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.141696 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-p5jk7"] Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.144805 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.147185 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.147375 4665 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-b2vzj" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.147483 4665 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.147677 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-pchdx"] Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.148391 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-pchdx" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.152377 4665 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.166033 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-pchdx"] Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.288201 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-wxh7x"] Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.289414 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-wxh7x" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.292175 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-dw2lj"] Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.293184 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-dw2lj" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.294161 4665 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.294380 4665 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.295454 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.295634 4665 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.296268 4665 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-d6dxx" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.309118 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-dw2lj"] Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.309740 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvbxs\" (UniqueName: \"kubernetes.io/projected/f444b2fb-024e-4f65-84cc-4bc16a3cc6a9-kube-api-access-rvbxs\") pod \"frr-k8s-webhook-server-7fcb986d4-pchdx\" (UID: \"f444b2fb-024e-4f65-84cc-4bc16a3cc6a9\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-pchdx" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.309804 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f444b2fb-024e-4f65-84cc-4bc16a3cc6a9-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-pchdx\" (UID: \"f444b2fb-024e-4f65-84cc-4bc16a3cc6a9\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-pchdx" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.309837 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/308f300a-520c-4c85-9cbd-dac3c432bdc1-frr-sockets\") pod \"frr-k8s-p5jk7\" (UID: \"308f300a-520c-4c85-9cbd-dac3c432bdc1\") " pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.309855 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" 
(UniqueName: \"kubernetes.io/empty-dir/308f300a-520c-4c85-9cbd-dac3c432bdc1-metrics\") pod \"frr-k8s-p5jk7\" (UID: \"308f300a-520c-4c85-9cbd-dac3c432bdc1\") " pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.309891 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/308f300a-520c-4c85-9cbd-dac3c432bdc1-reloader\") pod \"frr-k8s-p5jk7\" (UID: \"308f300a-520c-4c85-9cbd-dac3c432bdc1\") " pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.309906 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/308f300a-520c-4c85-9cbd-dac3c432bdc1-frr-conf\") pod \"frr-k8s-p5jk7\" (UID: \"308f300a-520c-4c85-9cbd-dac3c432bdc1\") " pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.309924 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/308f300a-520c-4c85-9cbd-dac3c432bdc1-metrics-certs\") pod \"frr-k8s-p5jk7\" (UID: \"308f300a-520c-4c85-9cbd-dac3c432bdc1\") " pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.310006 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnkzl\" (UniqueName: \"kubernetes.io/projected/308f300a-520c-4c85-9cbd-dac3c432bdc1-kube-api-access-hnkzl\") pod \"frr-k8s-p5jk7\" (UID: \"308f300a-520c-4c85-9cbd-dac3c432bdc1\") " pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.310052 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/308f300a-520c-4c85-9cbd-dac3c432bdc1-frr-startup\") pod \"frr-k8s-p5jk7\" (UID: \"308f300a-520c-4c85-9cbd-dac3c432bdc1\") " pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.411495 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/44aa572b-ef2b-4b3a-83ef-9a45cfd73067-metrics-certs\") pod \"speaker-wxh7x\" (UID: \"44aa572b-ef2b-4b3a-83ef-9a45cfd73067\") " pod="metallb-system/speaker-wxh7x" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.411559 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/308f300a-520c-4c85-9cbd-dac3c432bdc1-frr-sockets\") pod \"frr-k8s-p5jk7\" (UID: \"308f300a-520c-4c85-9cbd-dac3c432bdc1\") " pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.411586 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/308f300a-520c-4c85-9cbd-dac3c432bdc1-metrics\") pod \"frr-k8s-p5jk7\" (UID: \"308f300a-520c-4c85-9cbd-dac3c432bdc1\") " pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.411626 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fw2pq\" (UniqueName: \"kubernetes.io/projected/44aa572b-ef2b-4b3a-83ef-9a45cfd73067-kube-api-access-fw2pq\") pod \"speaker-wxh7x\" (UID: \"44aa572b-ef2b-4b3a-83ef-9a45cfd73067\") " 
pod="metallb-system/speaker-wxh7x" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.411665 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/308f300a-520c-4c85-9cbd-dac3c432bdc1-reloader\") pod \"frr-k8s-p5jk7\" (UID: \"308f300a-520c-4c85-9cbd-dac3c432bdc1\") " pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.411687 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/308f300a-520c-4c85-9cbd-dac3c432bdc1-frr-conf\") pod \"frr-k8s-p5jk7\" (UID: \"308f300a-520c-4c85-9cbd-dac3c432bdc1\") " pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.411708 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b1652627-99c3-4670-84ec-c770bf76a4b4-cert\") pod \"controller-f8648f98b-dw2lj\" (UID: \"b1652627-99c3-4670-84ec-c770bf76a4b4\") " pod="metallb-system/controller-f8648f98b-dw2lj" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.411736 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/308f300a-520c-4c85-9cbd-dac3c432bdc1-metrics-certs\") pod \"frr-k8s-p5jk7\" (UID: \"308f300a-520c-4c85-9cbd-dac3c432bdc1\") " pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.411759 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnkzl\" (UniqueName: \"kubernetes.io/projected/308f300a-520c-4c85-9cbd-dac3c432bdc1-kube-api-access-hnkzl\") pod \"frr-k8s-p5jk7\" (UID: \"308f300a-520c-4c85-9cbd-dac3c432bdc1\") " pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.411774 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frgxt\" (UniqueName: \"kubernetes.io/projected/b1652627-99c3-4670-84ec-c770bf76a4b4-kube-api-access-frgxt\") pod \"controller-f8648f98b-dw2lj\" (UID: \"b1652627-99c3-4670-84ec-c770bf76a4b4\") " pod="metallb-system/controller-f8648f98b-dw2lj" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.411966 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/308f300a-520c-4c85-9cbd-dac3c432bdc1-frr-startup\") pod \"frr-k8s-p5jk7\" (UID: \"308f300a-520c-4c85-9cbd-dac3c432bdc1\") " pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.411965 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/308f300a-520c-4c85-9cbd-dac3c432bdc1-frr-sockets\") pod \"frr-k8s-p5jk7\" (UID: \"308f300a-520c-4c85-9cbd-dac3c432bdc1\") " pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:26:50 crc kubenswrapper[4665]: E1205 01:26:50.411909 4665 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found Dec 05 01:26:50 crc kubenswrapper[4665]: E1205 01:26:50.412056 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/308f300a-520c-4c85-9cbd-dac3c432bdc1-metrics-certs podName:308f300a-520c-4c85-9cbd-dac3c432bdc1 nodeName:}" failed. No retries permitted until 2025-12-05 01:26:50.912036645 +0000 UTC m=+986.251428934 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/308f300a-520c-4c85-9cbd-dac3c432bdc1-metrics-certs") pod "frr-k8s-p5jk7" (UID: "308f300a-520c-4c85-9cbd-dac3c432bdc1") : secret "frr-k8s-certs-secret" not found Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.412058 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/308f300a-520c-4c85-9cbd-dac3c432bdc1-metrics\") pod \"frr-k8s-p5jk7\" (UID: \"308f300a-520c-4c85-9cbd-dac3c432bdc1\") " pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.412332 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b1652627-99c3-4670-84ec-c770bf76a4b4-metrics-certs\") pod \"controller-f8648f98b-dw2lj\" (UID: \"b1652627-99c3-4670-84ec-c770bf76a4b4\") " pod="metallb-system/controller-f8648f98b-dw2lj" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.412385 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvbxs\" (UniqueName: \"kubernetes.io/projected/f444b2fb-024e-4f65-84cc-4bc16a3cc6a9-kube-api-access-rvbxs\") pod \"frr-k8s-webhook-server-7fcb986d4-pchdx\" (UID: \"f444b2fb-024e-4f65-84cc-4bc16a3cc6a9\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-pchdx" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.412412 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/44aa572b-ef2b-4b3a-83ef-9a45cfd73067-memberlist\") pod \"speaker-wxh7x\" (UID: \"44aa572b-ef2b-4b3a-83ef-9a45cfd73067\") " pod="metallb-system/speaker-wxh7x" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.412407 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/308f300a-520c-4c85-9cbd-dac3c432bdc1-reloader\") pod \"frr-k8s-p5jk7\" (UID: \"308f300a-520c-4c85-9cbd-dac3c432bdc1\") " pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.412486 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f444b2fb-024e-4f65-84cc-4bc16a3cc6a9-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-pchdx\" (UID: \"f444b2fb-024e-4f65-84cc-4bc16a3cc6a9\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-pchdx" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.412576 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/308f300a-520c-4c85-9cbd-dac3c432bdc1-frr-conf\") pod \"frr-k8s-p5jk7\" (UID: \"308f300a-520c-4c85-9cbd-dac3c432bdc1\") " pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.412767 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/308f300a-520c-4c85-9cbd-dac3c432bdc1-frr-startup\") pod \"frr-k8s-p5jk7\" (UID: \"308f300a-520c-4c85-9cbd-dac3c432bdc1\") " pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.412768 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/44aa572b-ef2b-4b3a-83ef-9a45cfd73067-metallb-excludel2\") pod \"speaker-wxh7x\" (UID: 
\"44aa572b-ef2b-4b3a-83ef-9a45cfd73067\") " pod="metallb-system/speaker-wxh7x" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.418466 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/f444b2fb-024e-4f65-84cc-4bc16a3cc6a9-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-pchdx\" (UID: \"f444b2fb-024e-4f65-84cc-4bc16a3cc6a9\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-pchdx" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.432559 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnkzl\" (UniqueName: \"kubernetes.io/projected/308f300a-520c-4c85-9cbd-dac3c432bdc1-kube-api-access-hnkzl\") pod \"frr-k8s-p5jk7\" (UID: \"308f300a-520c-4c85-9cbd-dac3c432bdc1\") " pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.438015 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvbxs\" (UniqueName: \"kubernetes.io/projected/f444b2fb-024e-4f65-84cc-4bc16a3cc6a9-kube-api-access-rvbxs\") pod \"frr-k8s-webhook-server-7fcb986d4-pchdx\" (UID: \"f444b2fb-024e-4f65-84cc-4bc16a3cc6a9\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-pchdx" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.468059 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-pchdx" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.514680 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fw2pq\" (UniqueName: \"kubernetes.io/projected/44aa572b-ef2b-4b3a-83ef-9a45cfd73067-kube-api-access-fw2pq\") pod \"speaker-wxh7x\" (UID: \"44aa572b-ef2b-4b3a-83ef-9a45cfd73067\") " pod="metallb-system/speaker-wxh7x" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.514731 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b1652627-99c3-4670-84ec-c770bf76a4b4-cert\") pod \"controller-f8648f98b-dw2lj\" (UID: \"b1652627-99c3-4670-84ec-c770bf76a4b4\") " pod="metallb-system/controller-f8648f98b-dw2lj" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.514781 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frgxt\" (UniqueName: \"kubernetes.io/projected/b1652627-99c3-4670-84ec-c770bf76a4b4-kube-api-access-frgxt\") pod \"controller-f8648f98b-dw2lj\" (UID: \"b1652627-99c3-4670-84ec-c770bf76a4b4\") " pod="metallb-system/controller-f8648f98b-dw2lj" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.514951 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b1652627-99c3-4670-84ec-c770bf76a4b4-metrics-certs\") pod \"controller-f8648f98b-dw2lj\" (UID: \"b1652627-99c3-4670-84ec-c770bf76a4b4\") " pod="metallb-system/controller-f8648f98b-dw2lj" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.514982 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/44aa572b-ef2b-4b3a-83ef-9a45cfd73067-memberlist\") pod \"speaker-wxh7x\" (UID: \"44aa572b-ef2b-4b3a-83ef-9a45cfd73067\") " pod="metallb-system/speaker-wxh7x" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.515011 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: 
\"kubernetes.io/configmap/44aa572b-ef2b-4b3a-83ef-9a45cfd73067-metallb-excludel2\") pod \"speaker-wxh7x\" (UID: \"44aa572b-ef2b-4b3a-83ef-9a45cfd73067\") " pod="metallb-system/speaker-wxh7x" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.515028 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/44aa572b-ef2b-4b3a-83ef-9a45cfd73067-metrics-certs\") pod \"speaker-wxh7x\" (UID: \"44aa572b-ef2b-4b3a-83ef-9a45cfd73067\") " pod="metallb-system/speaker-wxh7x" Dec 05 01:26:50 crc kubenswrapper[4665]: E1205 01:26:50.515099 4665 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Dec 05 01:26:50 crc kubenswrapper[4665]: E1205 01:26:50.515153 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/44aa572b-ef2b-4b3a-83ef-9a45cfd73067-metrics-certs podName:44aa572b-ef2b-4b3a-83ef-9a45cfd73067 nodeName:}" failed. No retries permitted until 2025-12-05 01:26:51.01513914 +0000 UTC m=+986.354531439 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/44aa572b-ef2b-4b3a-83ef-9a45cfd73067-metrics-certs") pod "speaker-wxh7x" (UID: "44aa572b-ef2b-4b3a-83ef-9a45cfd73067") : secret "speaker-certs-secret" not found Dec 05 01:26:50 crc kubenswrapper[4665]: E1205 01:26:50.515212 4665 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 05 01:26:50 crc kubenswrapper[4665]: E1205 01:26:50.515284 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/44aa572b-ef2b-4b3a-83ef-9a45cfd73067-memberlist podName:44aa572b-ef2b-4b3a-83ef-9a45cfd73067 nodeName:}" failed. No retries permitted until 2025-12-05 01:26:51.015265443 +0000 UTC m=+986.354657742 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/44aa572b-ef2b-4b3a-83ef-9a45cfd73067-memberlist") pod "speaker-wxh7x" (UID: "44aa572b-ef2b-4b3a-83ef-9a45cfd73067") : secret "metallb-memberlist" not found Dec 05 01:26:50 crc kubenswrapper[4665]: E1205 01:26:50.515314 4665 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Dec 05 01:26:50 crc kubenswrapper[4665]: E1205 01:26:50.515372 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b1652627-99c3-4670-84ec-c770bf76a4b4-metrics-certs podName:b1652627-99c3-4670-84ec-c770bf76a4b4 nodeName:}" failed. No retries permitted until 2025-12-05 01:26:51.015352685 +0000 UTC m=+986.354744984 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/b1652627-99c3-4670-84ec-c770bf76a4b4-metrics-certs") pod "controller-f8648f98b-dw2lj" (UID: "b1652627-99c3-4670-84ec-c770bf76a4b4") : secret "controller-certs-secret" not found Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.515776 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/44aa572b-ef2b-4b3a-83ef-9a45cfd73067-metallb-excludel2\") pod \"speaker-wxh7x\" (UID: \"44aa572b-ef2b-4b3a-83ef-9a45cfd73067\") " pod="metallb-system/speaker-wxh7x" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.524126 4665 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.531759 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/b1652627-99c3-4670-84ec-c770bf76a4b4-cert\") pod \"controller-f8648f98b-dw2lj\" (UID: \"b1652627-99c3-4670-84ec-c770bf76a4b4\") " pod="metallb-system/controller-f8648f98b-dw2lj" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.538215 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frgxt\" (UniqueName: \"kubernetes.io/projected/b1652627-99c3-4670-84ec-c770bf76a4b4-kube-api-access-frgxt\") pod \"controller-f8648f98b-dw2lj\" (UID: \"b1652627-99c3-4670-84ec-c770bf76a4b4\") " pod="metallb-system/controller-f8648f98b-dw2lj" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.539499 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fw2pq\" (UniqueName: \"kubernetes.io/projected/44aa572b-ef2b-4b3a-83ef-9a45cfd73067-kube-api-access-fw2pq\") pod \"speaker-wxh7x\" (UID: \"44aa572b-ef2b-4b3a-83ef-9a45cfd73067\") " pod="metallb-system/speaker-wxh7x" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.739809 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-pchdx"] Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.828164 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-pchdx" event={"ID":"f444b2fb-024e-4f65-84cc-4bc16a3cc6a9","Type":"ContainerStarted","Data":"033a864879335e7a00c2bb7b49a2fb9a1d6793b314355609497a1b49a872388a"} Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.920555 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/308f300a-520c-4c85-9cbd-dac3c432bdc1-metrics-certs\") pod \"frr-k8s-p5jk7\" (UID: \"308f300a-520c-4c85-9cbd-dac3c432bdc1\") " pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:26:50 crc kubenswrapper[4665]: I1205 01:26:50.925629 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/308f300a-520c-4c85-9cbd-dac3c432bdc1-metrics-certs\") pod \"frr-k8s-p5jk7\" (UID: \"308f300a-520c-4c85-9cbd-dac3c432bdc1\") " pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:26:51 crc kubenswrapper[4665]: I1205 01:26:51.021368 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b1652627-99c3-4670-84ec-c770bf76a4b4-metrics-certs\") pod \"controller-f8648f98b-dw2lj\" (UID: \"b1652627-99c3-4670-84ec-c770bf76a4b4\") " pod="metallb-system/controller-f8648f98b-dw2lj" Dec 05 01:26:51 crc 
kubenswrapper[4665]: I1205 01:26:51.021428 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/44aa572b-ef2b-4b3a-83ef-9a45cfd73067-memberlist\") pod \"speaker-wxh7x\" (UID: \"44aa572b-ef2b-4b3a-83ef-9a45cfd73067\") " pod="metallb-system/speaker-wxh7x" Dec 05 01:26:51 crc kubenswrapper[4665]: I1205 01:26:51.021474 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/44aa572b-ef2b-4b3a-83ef-9a45cfd73067-metrics-certs\") pod \"speaker-wxh7x\" (UID: \"44aa572b-ef2b-4b3a-83ef-9a45cfd73067\") " pod="metallb-system/speaker-wxh7x" Dec 05 01:26:51 crc kubenswrapper[4665]: E1205 01:26:51.022547 4665 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 05 01:26:51 crc kubenswrapper[4665]: E1205 01:26:51.022595 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/44aa572b-ef2b-4b3a-83ef-9a45cfd73067-memberlist podName:44aa572b-ef2b-4b3a-83ef-9a45cfd73067 nodeName:}" failed. No retries permitted until 2025-12-05 01:26:52.022581272 +0000 UTC m=+987.361973561 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/44aa572b-ef2b-4b3a-83ef-9a45cfd73067-memberlist") pod "speaker-wxh7x" (UID: "44aa572b-ef2b-4b3a-83ef-9a45cfd73067") : secret "metallb-memberlist" not found Dec 05 01:26:51 crc kubenswrapper[4665]: I1205 01:26:51.025804 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b1652627-99c3-4670-84ec-c770bf76a4b4-metrics-certs\") pod \"controller-f8648f98b-dw2lj\" (UID: \"b1652627-99c3-4670-84ec-c770bf76a4b4\") " pod="metallb-system/controller-f8648f98b-dw2lj" Dec 05 01:26:51 crc kubenswrapper[4665]: I1205 01:26:51.026431 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/44aa572b-ef2b-4b3a-83ef-9a45cfd73067-metrics-certs\") pod \"speaker-wxh7x\" (UID: \"44aa572b-ef2b-4b3a-83ef-9a45cfd73067\") " pod="metallb-system/speaker-wxh7x" Dec 05 01:26:51 crc kubenswrapper[4665]: I1205 01:26:51.062578 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:26:51 crc kubenswrapper[4665]: I1205 01:26:51.215481 4665 util.go:30] "No sandbox for pod can be found. 
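The nestedpendingoperations entries above show the kubelet's per-volume retry backoff: the first failed SetUp for the missing memberlist secret is retried after 500ms, the next after 1s, the delay doubling on each failure until the secret appears (it does, at 01:26:52.042238). A minimal sketch of that doubling policy, with an illustrative cap (the kubelet's actual cap may differ):

package main

import (
	"fmt"
	"time"
)

// nextDelay sketches the backoff seen in the log: 500ms after the
// first failure, doubling on each subsequent failure, capped so a
// persistently missing secret does not push retries out forever.
// The 500ms start and 2x factor match the log; the 2m cap is an
// assumption for illustration.
func nextDelay(prev time.Duration) time.Duration {
	const (
		initial = 500 * time.Millisecond
		max     = 2 * time.Minute
	)
	if prev == 0 {
		return initial
	}
	if d := prev * 2; d < max {
		return d
	}
	return max
}

func main() {
	d := time.Duration(0)
	for i := 1; i <= 5; i++ {
		d = nextDelay(d)
		fmt.Printf("retry %d after %v\n", i, d) // 500ms, 1s, 2s, 4s, 8s
	}
}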
Need to start a new one" pod="metallb-system/controller-f8648f98b-dw2lj" Dec 05 01:26:51 crc kubenswrapper[4665]: I1205 01:26:51.436849 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-dw2lj"] Dec 05 01:26:51 crc kubenswrapper[4665]: I1205 01:26:51.836920 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-dw2lj" event={"ID":"b1652627-99c3-4670-84ec-c770bf76a4b4","Type":"ContainerStarted","Data":"cb3b9834b80a5b41d65604ea6d4151904a8b88ada2f1a9b148116229e32adcf7"} Dec 05 01:26:51 crc kubenswrapper[4665]: I1205 01:26:51.837207 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-dw2lj" event={"ID":"b1652627-99c3-4670-84ec-c770bf76a4b4","Type":"ContainerStarted","Data":"5a14cb314465244f83574565775543931b7a9ba593622aab1e11d8f94164bec4"} Dec 05 01:26:51 crc kubenswrapper[4665]: I1205 01:26:51.837220 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-dw2lj" event={"ID":"b1652627-99c3-4670-84ec-c770bf76a4b4","Type":"ContainerStarted","Data":"1021fa4af4e22e6621513ba13f9d73a0a16ddbcd68aff010e09725abc95a4008"} Dec 05 01:26:51 crc kubenswrapper[4665]: I1205 01:26:51.838191 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-dw2lj" Dec 05 01:26:51 crc kubenswrapper[4665]: I1205 01:26:51.839111 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p5jk7" event={"ID":"308f300a-520c-4c85-9cbd-dac3c432bdc1","Type":"ContainerStarted","Data":"9ebd999b0e05e9e3436b3c5eedd08920da4727d4f275e33ff88cbbf730ea2190"} Dec 05 01:26:51 crc kubenswrapper[4665]: I1205 01:26:51.854897 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-dw2lj" podStartSLOduration=1.854877817 podStartE2EDuration="1.854877817s" podCreationTimestamp="2025-12-05 01:26:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:26:51.852764165 +0000 UTC m=+987.192156484" watchObservedRunningTime="2025-12-05 01:26:51.854877817 +0000 UTC m=+987.194270126" Dec 05 01:26:52 crc kubenswrapper[4665]: I1205 01:26:52.033815 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/44aa572b-ef2b-4b3a-83ef-9a45cfd73067-memberlist\") pod \"speaker-wxh7x\" (UID: \"44aa572b-ef2b-4b3a-83ef-9a45cfd73067\") " pod="metallb-system/speaker-wxh7x" Dec 05 01:26:52 crc kubenswrapper[4665]: I1205 01:26:52.042238 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/44aa572b-ef2b-4b3a-83ef-9a45cfd73067-memberlist\") pod \"speaker-wxh7x\" (UID: \"44aa572b-ef2b-4b3a-83ef-9a45cfd73067\") " pod="metallb-system/speaker-wxh7x" Dec 05 01:26:52 crc kubenswrapper[4665]: I1205 01:26:52.110760 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-wxh7x" Dec 05 01:26:52 crc kubenswrapper[4665]: W1205 01:26:52.146003 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod44aa572b_ef2b_4b3a_83ef_9a45cfd73067.slice/crio-0cf0ca7bec1a61d1fbbdbb882d227d694195916d747ccf19c0ff5d3c554d8d6d WatchSource:0}: Error finding container 0cf0ca7bec1a61d1fbbdbb882d227d694195916d747ccf19c0ff5d3c554d8d6d: Status 404 returned error can't find the container with id 0cf0ca7bec1a61d1fbbdbb882d227d694195916d747ccf19c0ff5d3c554d8d6d Dec 05 01:26:52 crc kubenswrapper[4665]: I1205 01:26:52.849685 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-wxh7x" event={"ID":"44aa572b-ef2b-4b3a-83ef-9a45cfd73067","Type":"ContainerStarted","Data":"3f08335b50dd30dc0b26ddb491c3ca5bb05facc6f4fb7842e37cb166c44a55c8"} Dec 05 01:26:52 crc kubenswrapper[4665]: I1205 01:26:52.850715 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-wxh7x" event={"ID":"44aa572b-ef2b-4b3a-83ef-9a45cfd73067","Type":"ContainerStarted","Data":"0cf0ca7bec1a61d1fbbdbb882d227d694195916d747ccf19c0ff5d3c554d8d6d"} Dec 05 01:26:53 crc kubenswrapper[4665]: I1205 01:26:53.873425 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-wxh7x" event={"ID":"44aa572b-ef2b-4b3a-83ef-9a45cfd73067","Type":"ContainerStarted","Data":"4b044a59c3cc9a5166e435976b5bc896394db05df88a2075e39d310745505ec0"} Dec 05 01:26:53 crc kubenswrapper[4665]: I1205 01:26:53.873485 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-wxh7x" Dec 05 01:26:53 crc kubenswrapper[4665]: I1205 01:26:53.900063 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-wxh7x" podStartSLOduration=3.900047736 podStartE2EDuration="3.900047736s" podCreationTimestamp="2025-12-05 01:26:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:26:53.897798212 +0000 UTC m=+989.237190511" watchObservedRunningTime="2025-12-05 01:26:53.900047736 +0000 UTC m=+989.239440035" Dec 05 01:26:59 crc kubenswrapper[4665]: I1205 01:26:59.922966 4665 generic.go:334] "Generic (PLEG): container finished" podID="308f300a-520c-4c85-9cbd-dac3c432bdc1" containerID="8d15093f7e8840b19c3e254c36c88469188c35a17d58ac2d0f5d150df0e6ad7e" exitCode=0 Dec 05 01:26:59 crc kubenswrapper[4665]: I1205 01:26:59.923149 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p5jk7" event={"ID":"308f300a-520c-4c85-9cbd-dac3c432bdc1","Type":"ContainerDied","Data":"8d15093f7e8840b19c3e254c36c88469188c35a17d58ac2d0f5d150df0e6ad7e"} Dec 05 01:26:59 crc kubenswrapper[4665]: I1205 01:26:59.926001 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-pchdx" event={"ID":"f444b2fb-024e-4f65-84cc-4bc16a3cc6a9","Type":"ContainerStarted","Data":"69fb45b9d09c9b5d0efacee7387e80c1b0fc1f310897de41f3815f222c34b0c3"} Dec 05 01:26:59 crc kubenswrapper[4665]: I1205 01:26:59.926130 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-pchdx" Dec 05 01:26:59 crc kubenswrapper[4665]: I1205 01:26:59.994492 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-pchdx" podStartSLOduration=1.08780771 
podStartE2EDuration="9.994474111s" podCreationTimestamp="2025-12-05 01:26:50 +0000 UTC" firstStartedPulling="2025-12-05 01:26:50.749762479 +0000 UTC m=+986.089154778" lastFinishedPulling="2025-12-05 01:26:59.65642888 +0000 UTC m=+994.995821179" observedRunningTime="2025-12-05 01:26:59.992401952 +0000 UTC m=+995.331794251" watchObservedRunningTime="2025-12-05 01:26:59.994474111 +0000 UTC m=+995.333866410" Dec 05 01:27:00 crc kubenswrapper[4665]: I1205 01:27:00.932233 4665 generic.go:334] "Generic (PLEG): container finished" podID="308f300a-520c-4c85-9cbd-dac3c432bdc1" containerID="c936b4564323fbf88498c6c01b9785ff3b230bb54878908fa2b9c7a4e84d0e11" exitCode=0 Dec 05 01:27:00 crc kubenswrapper[4665]: I1205 01:27:00.932313 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p5jk7" event={"ID":"308f300a-520c-4c85-9cbd-dac3c432bdc1","Type":"ContainerDied","Data":"c936b4564323fbf88498c6c01b9785ff3b230bb54878908fa2b9c7a4e84d0e11"} Dec 05 01:27:01 crc kubenswrapper[4665]: I1205 01:27:01.220204 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-dw2lj" Dec 05 01:27:01 crc kubenswrapper[4665]: I1205 01:27:01.941587 4665 generic.go:334] "Generic (PLEG): container finished" podID="308f300a-520c-4c85-9cbd-dac3c432bdc1" containerID="67dd6550eecc7d43fd331e71044bc7a760206e6a92add019c8450df36590e112" exitCode=0 Dec 05 01:27:01 crc kubenswrapper[4665]: I1205 01:27:01.941912 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p5jk7" event={"ID":"308f300a-520c-4c85-9cbd-dac3c432bdc1","Type":"ContainerDied","Data":"67dd6550eecc7d43fd331e71044bc7a760206e6a92add019c8450df36590e112"} Dec 05 01:27:02 crc kubenswrapper[4665]: I1205 01:27:02.113691 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-wxh7x" Dec 05 01:27:02 crc kubenswrapper[4665]: I1205 01:27:02.954036 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p5jk7" event={"ID":"308f300a-520c-4c85-9cbd-dac3c432bdc1","Type":"ContainerStarted","Data":"aeae9ad2d4c7d2da6fe21afa2192a5e0aa9379e39891f8f9d6d54f86cea63317"} Dec 05 01:27:02 crc kubenswrapper[4665]: I1205 01:27:02.954346 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p5jk7" event={"ID":"308f300a-520c-4c85-9cbd-dac3c432bdc1","Type":"ContainerStarted","Data":"e4fdd21dcfdd0a95562920a9ed707dd39c230f1f6e05e9e962f58dce95167137"} Dec 05 01:27:02 crc kubenswrapper[4665]: I1205 01:27:02.954359 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p5jk7" event={"ID":"308f300a-520c-4c85-9cbd-dac3c432bdc1","Type":"ContainerStarted","Data":"ed1e95cc776c9fbf8ffde83f519a32071673b824d28f8c31a4605f3f807d93ce"} Dec 05 01:27:02 crc kubenswrapper[4665]: I1205 01:27:02.954371 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p5jk7" event={"ID":"308f300a-520c-4c85-9cbd-dac3c432bdc1","Type":"ContainerStarted","Data":"6fafeee6974844f690e3b22499b7b9052bfd9fcb5c7f63c1f7fb404690567fa1"} Dec 05 01:27:02 crc kubenswrapper[4665]: I1205 01:27:02.954381 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-p5jk7" event={"ID":"308f300a-520c-4c85-9cbd-dac3c432bdc1","Type":"ContainerStarted","Data":"abeb2a3a9e9012345ac44d79c084d4170097e4c5e4388aa56efda6b4b8180b94"} Dec 05 01:27:02 crc kubenswrapper[4665]: I1205 01:27:02.954392 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="metallb-system/frr-k8s-p5jk7" event={"ID":"308f300a-520c-4c85-9cbd-dac3c432bdc1","Type":"ContainerStarted","Data":"a82129a19d2903d8643772b691a4c53bdca288ffc488213b2e5d12fb44d3b5c8"} Dec 05 01:27:03 crc kubenswrapper[4665]: I1205 01:27:03.961389 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:27:05 crc kubenswrapper[4665]: I1205 01:27:05.365214 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-p5jk7" podStartSLOduration=7.088900937 podStartE2EDuration="15.365197631s" podCreationTimestamp="2025-12-05 01:26:50 +0000 UTC" firstStartedPulling="2025-12-05 01:26:51.397879345 +0000 UTC m=+986.737271634" lastFinishedPulling="2025-12-05 01:26:59.674176029 +0000 UTC m=+995.013568328" observedRunningTime="2025-12-05 01:27:03.999186238 +0000 UTC m=+999.338578537" watchObservedRunningTime="2025-12-05 01:27:05.365197631 +0000 UTC m=+1000.704589930" Dec 05 01:27:05 crc kubenswrapper[4665]: I1205 01:27:05.366911 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-5svhw"] Dec 05 01:27:05 crc kubenswrapper[4665]: I1205 01:27:05.367651 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-5svhw" Dec 05 01:27:05 crc kubenswrapper[4665]: I1205 01:27:05.377489 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-fjq68" Dec 05 01:27:05 crc kubenswrapper[4665]: I1205 01:27:05.377522 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 05 01:27:05 crc kubenswrapper[4665]: I1205 01:27:05.377667 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 05 01:27:05 crc kubenswrapper[4665]: I1205 01:27:05.383684 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-5svhw"] Dec 05 01:27:05 crc kubenswrapper[4665]: I1205 01:27:05.539098 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k2m8b\" (UniqueName: \"kubernetes.io/projected/440d1620-c2b3-4b1d-ae1c-002c2baf5557-kube-api-access-k2m8b\") pod \"openstack-operator-index-5svhw\" (UID: \"440d1620-c2b3-4b1d-ae1c-002c2baf5557\") " pod="openstack-operators/openstack-operator-index-5svhw" Dec 05 01:27:05 crc kubenswrapper[4665]: I1205 01:27:05.640193 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k2m8b\" (UniqueName: \"kubernetes.io/projected/440d1620-c2b3-4b1d-ae1c-002c2baf5557-kube-api-access-k2m8b\") pod \"openstack-operator-index-5svhw\" (UID: \"440d1620-c2b3-4b1d-ae1c-002c2baf5557\") " pod="openstack-operators/openstack-operator-index-5svhw" Dec 05 01:27:05 crc kubenswrapper[4665]: I1205 01:27:05.668633 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k2m8b\" (UniqueName: \"kubernetes.io/projected/440d1620-c2b3-4b1d-ae1c-002c2baf5557-kube-api-access-k2m8b\") pod \"openstack-operator-index-5svhw\" (UID: \"440d1620-c2b3-4b1d-ae1c-002c2baf5557\") " pod="openstack-operators/openstack-operator-index-5svhw" Dec 05 01:27:05 crc kubenswrapper[4665]: I1205 01:27:05.691395 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-5svhw" Dec 05 01:27:06 crc kubenswrapper[4665]: I1205 01:27:06.064149 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:27:06 crc kubenswrapper[4665]: I1205 01:27:06.112727 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-p5jk7" Dec 05 01:27:06 crc kubenswrapper[4665]: I1205 01:27:06.169842 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-5svhw"] Dec 05 01:27:06 crc kubenswrapper[4665]: I1205 01:27:06.984598 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-5svhw" event={"ID":"440d1620-c2b3-4b1d-ae1c-002c2baf5557","Type":"ContainerStarted","Data":"9b5642e1087c99e4cf0dbedfd01671b23a0a3d1b7c3b9a331c43844d4f9dd116"} Dec 05 01:27:08 crc kubenswrapper[4665]: I1205 01:27:08.537506 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-5svhw"] Dec 05 01:27:08 crc kubenswrapper[4665]: I1205 01:27:08.996189 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-5svhw" event={"ID":"440d1620-c2b3-4b1d-ae1c-002c2baf5557","Type":"ContainerStarted","Data":"a24bf52510ce4d06f308e28742db5f861798403f7e9664523fa6cba772b12e02"} Dec 05 01:27:08 crc kubenswrapper[4665]: I1205 01:27:08.996778 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-5svhw" podUID="440d1620-c2b3-4b1d-ae1c-002c2baf5557" containerName="registry-server" containerID="cri-o://a24bf52510ce4d06f308e28742db5f861798403f7e9664523fa6cba772b12e02" gracePeriod=2 Dec 05 01:27:09 crc kubenswrapper[4665]: I1205 01:27:09.015887 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-5svhw" podStartSLOduration=1.346646927 podStartE2EDuration="4.015848448s" podCreationTimestamp="2025-12-05 01:27:05 +0000 UTC" firstStartedPulling="2025-12-05 01:27:06.188412077 +0000 UTC m=+1001.527804376" lastFinishedPulling="2025-12-05 01:27:08.857613598 +0000 UTC m=+1004.197005897" observedRunningTime="2025-12-05 01:27:09.013177093 +0000 UTC m=+1004.352569392" watchObservedRunningTime="2025-12-05 01:27:09.015848448 +0000 UTC m=+1004.355240747" Dec 05 01:27:09 crc kubenswrapper[4665]: I1205 01:27:09.153706 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-pwd7r"] Dec 05 01:27:09 crc kubenswrapper[4665]: I1205 01:27:09.154740 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-pwd7r" Dec 05 01:27:09 crc kubenswrapper[4665]: I1205 01:27:09.169416 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-pwd7r"] Dec 05 01:27:09 crc kubenswrapper[4665]: I1205 01:27:09.292941 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ghvsp\" (UniqueName: \"kubernetes.io/projected/68ae69df-a135-4ab5-b79c-47268f37c17f-kube-api-access-ghvsp\") pod \"openstack-operator-index-pwd7r\" (UID: \"68ae69df-a135-4ab5-b79c-47268f37c17f\") " pod="openstack-operators/openstack-operator-index-pwd7r" Dec 05 01:27:09 crc kubenswrapper[4665]: I1205 01:27:09.357737 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-5svhw_440d1620-c2b3-4b1d-ae1c-002c2baf5557/registry-server/0.log" Dec 05 01:27:09 crc kubenswrapper[4665]: I1205 01:27:09.357809 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-5svhw" Dec 05 01:27:09 crc kubenswrapper[4665]: I1205 01:27:09.394516 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ghvsp\" (UniqueName: \"kubernetes.io/projected/68ae69df-a135-4ab5-b79c-47268f37c17f-kube-api-access-ghvsp\") pod \"openstack-operator-index-pwd7r\" (UID: \"68ae69df-a135-4ab5-b79c-47268f37c17f\") " pod="openstack-operators/openstack-operator-index-pwd7r" Dec 05 01:27:09 crc kubenswrapper[4665]: I1205 01:27:09.427622 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ghvsp\" (UniqueName: \"kubernetes.io/projected/68ae69df-a135-4ab5-b79c-47268f37c17f-kube-api-access-ghvsp\") pod \"openstack-operator-index-pwd7r\" (UID: \"68ae69df-a135-4ab5-b79c-47268f37c17f\") " pod="openstack-operators/openstack-operator-index-pwd7r" Dec 05 01:27:09 crc kubenswrapper[4665]: I1205 01:27:09.475921 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-pwd7r" Dec 05 01:27:09 crc kubenswrapper[4665]: I1205 01:27:09.495713 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k2m8b\" (UniqueName: \"kubernetes.io/projected/440d1620-c2b3-4b1d-ae1c-002c2baf5557-kube-api-access-k2m8b\") pod \"440d1620-c2b3-4b1d-ae1c-002c2baf5557\" (UID: \"440d1620-c2b3-4b1d-ae1c-002c2baf5557\") " Dec 05 01:27:09 crc kubenswrapper[4665]: I1205 01:27:09.498471 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/440d1620-c2b3-4b1d-ae1c-002c2baf5557-kube-api-access-k2m8b" (OuterVolumeSpecName: "kube-api-access-k2m8b") pod "440d1620-c2b3-4b1d-ae1c-002c2baf5557" (UID: "440d1620-c2b3-4b1d-ae1c-002c2baf5557"). InnerVolumeSpecName "kube-api-access-k2m8b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:27:09 crc kubenswrapper[4665]: I1205 01:27:09.598438 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k2m8b\" (UniqueName: \"kubernetes.io/projected/440d1620-c2b3-4b1d-ae1c-002c2baf5557-kube-api-access-k2m8b\") on node \"crc\" DevicePath \"\"" Dec 05 01:27:09 crc kubenswrapper[4665]: I1205 01:27:09.881970 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-pwd7r"] Dec 05 01:27:10 crc kubenswrapper[4665]: I1205 01:27:10.013086 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-5svhw_440d1620-c2b3-4b1d-ae1c-002c2baf5557/registry-server/0.log" Dec 05 01:27:10 crc kubenswrapper[4665]: I1205 01:27:10.013454 4665 generic.go:334] "Generic (PLEG): container finished" podID="440d1620-c2b3-4b1d-ae1c-002c2baf5557" containerID="a24bf52510ce4d06f308e28742db5f861798403f7e9664523fa6cba772b12e02" exitCode=2 Dec 05 01:27:10 crc kubenswrapper[4665]: I1205 01:27:10.013548 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-5svhw" event={"ID":"440d1620-c2b3-4b1d-ae1c-002c2baf5557","Type":"ContainerDied","Data":"a24bf52510ce4d06f308e28742db5f861798403f7e9664523fa6cba772b12e02"} Dec 05 01:27:10 crc kubenswrapper[4665]: I1205 01:27:10.013583 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-5svhw" event={"ID":"440d1620-c2b3-4b1d-ae1c-002c2baf5557","Type":"ContainerDied","Data":"9b5642e1087c99e4cf0dbedfd01671b23a0a3d1b7c3b9a331c43844d4f9dd116"} Dec 05 01:27:10 crc kubenswrapper[4665]: I1205 01:27:10.013606 4665 scope.go:117] "RemoveContainer" containerID="a24bf52510ce4d06f308e28742db5f861798403f7e9664523fa6cba772b12e02" Dec 05 01:27:10 crc kubenswrapper[4665]: I1205 01:27:10.013754 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-5svhw" Dec 05 01:27:10 crc kubenswrapper[4665]: I1205 01:27:10.026970 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-pwd7r" event={"ID":"68ae69df-a135-4ab5-b79c-47268f37c17f","Type":"ContainerStarted","Data":"5ce3fbde302bb51d54e6caba4762f70ffa3a00a650896c6534d0d308d74cd3c1"} Dec 05 01:27:10 crc kubenswrapper[4665]: I1205 01:27:10.053478 4665 scope.go:117] "RemoveContainer" containerID="a24bf52510ce4d06f308e28742db5f861798403f7e9664523fa6cba772b12e02" Dec 05 01:27:10 crc kubenswrapper[4665]: I1205 01:27:10.055865 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-5svhw"] Dec 05 01:27:10 crc kubenswrapper[4665]: E1205 01:27:10.056859 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a24bf52510ce4d06f308e28742db5f861798403f7e9664523fa6cba772b12e02\": container with ID starting with a24bf52510ce4d06f308e28742db5f861798403f7e9664523fa6cba772b12e02 not found: ID does not exist" containerID="a24bf52510ce4d06f308e28742db5f861798403f7e9664523fa6cba772b12e02" Dec 05 01:27:10 crc kubenswrapper[4665]: I1205 01:27:10.056913 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a24bf52510ce4d06f308e28742db5f861798403f7e9664523fa6cba772b12e02"} err="failed to get container status \"a24bf52510ce4d06f308e28742db5f861798403f7e9664523fa6cba772b12e02\": rpc error: code = NotFound desc = could not find container \"a24bf52510ce4d06f308e28742db5f861798403f7e9664523fa6cba772b12e02\": container with ID starting with a24bf52510ce4d06f308e28742db5f861798403f7e9664523fa6cba772b12e02 not found: ID does not exist" Dec 05 01:27:10 crc kubenswrapper[4665]: I1205 01:27:10.066700 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-5svhw"] Dec 05 01:27:10 crc kubenswrapper[4665]: I1205 01:27:10.471873 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-pchdx" Dec 05 01:27:10 crc kubenswrapper[4665]: I1205 01:27:10.901340 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="440d1620-c2b3-4b1d-ae1c-002c2baf5557" path="/var/lib/kubelet/pods/440d1620-c2b3-4b1d-ae1c-002c2baf5557/volumes" Dec 05 01:27:11 crc kubenswrapper[4665]: I1205 01:27:11.033642 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-pwd7r" event={"ID":"68ae69df-a135-4ab5-b79c-47268f37c17f","Type":"ContainerStarted","Data":"d29469fe426888e0398ce5c2263098f41fe35b6f1502f40a0af4d7bd14592095"} Dec 05 01:27:11 crc kubenswrapper[4665]: I1205 01:27:11.053177 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-pwd7r" podStartSLOduration=1.998824082 podStartE2EDuration="2.053156877s" podCreationTimestamp="2025-12-05 01:27:09 +0000 UTC" firstStartedPulling="2025-12-05 01:27:09.890889137 +0000 UTC m=+1005.230281436" lastFinishedPulling="2025-12-05 01:27:09.945221932 +0000 UTC m=+1005.284614231" observedRunningTime="2025-12-05 01:27:11.046327812 +0000 UTC m=+1006.385720111" watchObservedRunningTime="2025-12-05 01:27:11.053156877 +0000 UTC m=+1006.392549166" Dec 05 01:27:14 crc kubenswrapper[4665]: I1205 01:27:14.933923 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc 
Dec 05 01:27:14 crc kubenswrapper[4665]: I1205 01:27:14.933986 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 01:27:19 crc kubenswrapper[4665]: I1205 01:27:19.476876 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-pwd7r"
Dec 05 01:27:19 crc kubenswrapper[4665]: I1205 01:27:19.477273 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-pwd7r"
Dec 05 01:27:19 crc kubenswrapper[4665]: I1205 01:27:19.523790 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-pwd7r"
Dec 05 01:27:20 crc kubenswrapper[4665]: I1205 01:27:20.112492 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-pwd7r"
Dec 05 01:27:20 crc kubenswrapper[4665]: I1205 01:27:20.977271 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb"]
Dec 05 01:27:20 crc kubenswrapper[4665]: E1205 01:27:20.977692 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="440d1620-c2b3-4b1d-ae1c-002c2baf5557" containerName="registry-server"
Dec 05 01:27:20 crc kubenswrapper[4665]: I1205 01:27:20.977709 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="440d1620-c2b3-4b1d-ae1c-002c2baf5557" containerName="registry-server"
Dec 05 01:27:20 crc kubenswrapper[4665]: I1205 01:27:20.977846 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="440d1620-c2b3-4b1d-ae1c-002c2baf5557" containerName="registry-server"
Dec 05 01:27:20 crc kubenswrapper[4665]: I1205 01:27:20.978947 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb"
Dec 05 01:27:20 crc kubenswrapper[4665]: I1205 01:27:20.982174 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-87htq"
Dec 05 01:27:20 crc kubenswrapper[4665]: I1205 01:27:20.988106 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb"]
Dec 05 01:27:21 crc kubenswrapper[4665]: I1205 01:27:21.067958 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-p5jk7"
Dec 05 01:27:21 crc kubenswrapper[4665]: I1205 01:27:21.143578 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2qt26\" (UniqueName: \"kubernetes.io/projected/3fc63545-1530-48cd-a790-f36d5cd5d73c-kube-api-access-2qt26\") pod \"58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb\" (UID: \"3fc63545-1530-48cd-a790-f36d5cd5d73c\") " pod="openstack-operators/58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb"
Dec 05 01:27:21 crc kubenswrapper[4665]: I1205 01:27:21.143623 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3fc63545-1530-48cd-a790-f36d5cd5d73c-bundle\") pod \"58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb\" (UID: \"3fc63545-1530-48cd-a790-f36d5cd5d73c\") " pod="openstack-operators/58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb"
Dec 05 01:27:21 crc kubenswrapper[4665]: I1205 01:27:21.143861 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3fc63545-1530-48cd-a790-f36d5cd5d73c-util\") pod \"58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb\" (UID: \"3fc63545-1530-48cd-a790-f36d5cd5d73c\") " pod="openstack-operators/58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb"
Dec 05 01:27:21 crc kubenswrapper[4665]: I1205 01:27:21.244682 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3fc63545-1530-48cd-a790-f36d5cd5d73c-bundle\") pod \"58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb\" (UID: \"3fc63545-1530-48cd-a790-f36d5cd5d73c\") " pod="openstack-operators/58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb"
Dec 05 01:27:21 crc kubenswrapper[4665]: I1205 01:27:21.244821 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3fc63545-1530-48cd-a790-f36d5cd5d73c-util\") pod \"58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb\" (UID: \"3fc63545-1530-48cd-a790-f36d5cd5d73c\") " pod="openstack-operators/58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb"
Dec 05 01:27:21 crc kubenswrapper[4665]: I1205 01:27:21.244847 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2qt26\" (UniqueName: \"kubernetes.io/projected/3fc63545-1530-48cd-a790-f36d5cd5d73c-kube-api-access-2qt26\") pod \"58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb\" (UID: \"3fc63545-1530-48cd-a790-f36d5cd5d73c\") " pod="openstack-operators/58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb"
Dec 05 01:27:21 crc kubenswrapper[4665]: I1205 01:27:21.245872 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3fc63545-1530-48cd-a790-f36d5cd5d73c-bundle\") pod \"58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb\" (UID: \"3fc63545-1530-48cd-a790-f36d5cd5d73c\") " pod="openstack-operators/58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb"
Dec 05 01:27:21 crc kubenswrapper[4665]: I1205 01:27:21.246540 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3fc63545-1530-48cd-a790-f36d5cd5d73c-util\") pod \"58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb\" (UID: \"3fc63545-1530-48cd-a790-f36d5cd5d73c\") " pod="openstack-operators/58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb"
Dec 05 01:27:21 crc kubenswrapper[4665]: I1205 01:27:21.270324 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2qt26\" (UniqueName: \"kubernetes.io/projected/3fc63545-1530-48cd-a790-f36d5cd5d73c-kube-api-access-2qt26\") pod \"58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb\" (UID: \"3fc63545-1530-48cd-a790-f36d5cd5d73c\") " pod="openstack-operators/58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb"
Dec 05 01:27:21 crc kubenswrapper[4665]: I1205 01:27:21.348540 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb"
Dec 05 01:27:21 crc kubenswrapper[4665]: I1205 01:27:21.754605 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb"]
Dec 05 01:27:21 crc kubenswrapper[4665]: W1205 01:27:21.758426 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3fc63545_1530_48cd_a790_f36d5cd5d73c.slice/crio-c9fb3dbf9b4b5e02af8309a617d34e0ee9e3ac1660f293cd23cc24a7700b80a0 WatchSource:0}: Error finding container c9fb3dbf9b4b5e02af8309a617d34e0ee9e3ac1660f293cd23cc24a7700b80a0: Status 404 returned error can't find the container with id c9fb3dbf9b4b5e02af8309a617d34e0ee9e3ac1660f293cd23cc24a7700b80a0
Dec 05 01:27:22 crc kubenswrapper[4665]: I1205 01:27:22.093890 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb" event={"ID":"3fc63545-1530-48cd-a790-f36d5cd5d73c","Type":"ContainerStarted","Data":"c9fb3dbf9b4b5e02af8309a617d34e0ee9e3ac1660f293cd23cc24a7700b80a0"}
Dec 05 01:27:23 crc kubenswrapper[4665]: I1205 01:27:23.101761 4665 generic.go:334] "Generic (PLEG): container finished" podID="3fc63545-1530-48cd-a790-f36d5cd5d73c" containerID="ec5348a9cfc05194c12c1e659f58e4fe9ba2c6a591104b5a44883e138a85cf92" exitCode=0
Dec 05 01:27:23 crc kubenswrapper[4665]: I1205 01:27:23.101810 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb" event={"ID":"3fc63545-1530-48cd-a790-f36d5cd5d73c","Type":"ContainerDied","Data":"ec5348a9cfc05194c12c1e659f58e4fe9ba2c6a591104b5a44883e138a85cf92"}
Dec 05 01:27:24 crc kubenswrapper[4665]: I1205 01:27:24.112082 4665 generic.go:334] "Generic (PLEG): container finished" podID="3fc63545-1530-48cd-a790-f36d5cd5d73c" containerID="d5482f6889321658efaf91d0895a1c22f8c3f63f0dac373c97fab6c6ab6b0181" exitCode=0
Dec 05 01:27:24 crc kubenswrapper[4665]: I1205 01:27:24.112162 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb" event={"ID":"3fc63545-1530-48cd-a790-f36d5cd5d73c","Type":"ContainerDied","Data":"d5482f6889321658efaf91d0895a1c22f8c3f63f0dac373c97fab6c6ab6b0181"}
Dec 05 01:27:25 crc kubenswrapper[4665]: I1205 01:27:25.120981 4665 generic.go:334] "Generic (PLEG): container finished" podID="3fc63545-1530-48cd-a790-f36d5cd5d73c" containerID="34775c715ba1c52faa00c7a84d9dec3ec2b0fc057e81d0b7a13a41b9a734c3b3" exitCode=0
Dec 05 01:27:25 crc kubenswrapper[4665]: I1205 01:27:25.121055 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb" event={"ID":"3fc63545-1530-48cd-a790-f36d5cd5d73c","Type":"ContainerDied","Data":"34775c715ba1c52faa00c7a84d9dec3ec2b0fc057e81d0b7a13a41b9a734c3b3"}
Dec 05 01:27:26 crc kubenswrapper[4665]: I1205 01:27:26.401586 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb"
Dec 05 01:27:26 crc kubenswrapper[4665]: I1205 01:27:26.512905 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3fc63545-1530-48cd-a790-f36d5cd5d73c-util\") pod \"3fc63545-1530-48cd-a790-f36d5cd5d73c\" (UID: \"3fc63545-1530-48cd-a790-f36d5cd5d73c\") "
Dec 05 01:27:26 crc kubenswrapper[4665]: I1205 01:27:26.512979 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3fc63545-1530-48cd-a790-f36d5cd5d73c-bundle\") pod \"3fc63545-1530-48cd-a790-f36d5cd5d73c\" (UID: \"3fc63545-1530-48cd-a790-f36d5cd5d73c\") "
Dec 05 01:27:26 crc kubenswrapper[4665]: I1205 01:27:26.513007 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2qt26\" (UniqueName: \"kubernetes.io/projected/3fc63545-1530-48cd-a790-f36d5cd5d73c-kube-api-access-2qt26\") pod \"3fc63545-1530-48cd-a790-f36d5cd5d73c\" (UID: \"3fc63545-1530-48cd-a790-f36d5cd5d73c\") "
Dec 05 01:27:26 crc kubenswrapper[4665]: I1205 01:27:26.514097 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3fc63545-1530-48cd-a790-f36d5cd5d73c-bundle" (OuterVolumeSpecName: "bundle") pod "3fc63545-1530-48cd-a790-f36d5cd5d73c" (UID: "3fc63545-1530-48cd-a790-f36d5cd5d73c"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 01:27:26 crc kubenswrapper[4665]: I1205 01:27:26.519551 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3fc63545-1530-48cd-a790-f36d5cd5d73c-kube-api-access-2qt26" (OuterVolumeSpecName: "kube-api-access-2qt26") pod "3fc63545-1530-48cd-a790-f36d5cd5d73c" (UID: "3fc63545-1530-48cd-a790-f36d5cd5d73c"). InnerVolumeSpecName "kube-api-access-2qt26". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:27:26 crc kubenswrapper[4665]: I1205 01:27:26.526824 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3fc63545-1530-48cd-a790-f36d5cd5d73c-util" (OuterVolumeSpecName: "util") pod "3fc63545-1530-48cd-a790-f36d5cd5d73c" (UID: "3fc63545-1530-48cd-a790-f36d5cd5d73c"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 01:27:26 crc kubenswrapper[4665]: I1205 01:27:26.614418 4665 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3fc63545-1530-48cd-a790-f36d5cd5d73c-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 01:27:26 crc kubenswrapper[4665]: I1205 01:27:26.614444 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2qt26\" (UniqueName: \"kubernetes.io/projected/3fc63545-1530-48cd-a790-f36d5cd5d73c-kube-api-access-2qt26\") on node \"crc\" DevicePath \"\""
Dec 05 01:27:26 crc kubenswrapper[4665]: I1205 01:27:26.614456 4665 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3fc63545-1530-48cd-a790-f36d5cd5d73c-util\") on node \"crc\" DevicePath \"\""
Dec 05 01:27:27 crc kubenswrapper[4665]: I1205 01:27:27.140540 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb" event={"ID":"3fc63545-1530-48cd-a790-f36d5cd5d73c","Type":"ContainerDied","Data":"c9fb3dbf9b4b5e02af8309a617d34e0ee9e3ac1660f293cd23cc24a7700b80a0"}
Dec 05 01:27:27 crc kubenswrapper[4665]: I1205 01:27:27.140983 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c9fb3dbf9b4b5e02af8309a617d34e0ee9e3ac1660f293cd23cc24a7700b80a0"
Dec 05 01:27:27 crc kubenswrapper[4665]: I1205 01:27:27.141098 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb"
Dec 05 01:27:32 crc kubenswrapper[4665]: I1205 01:27:32.987012 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6f79d9dccc-zkrtn"]
Dec 05 01:27:32 crc kubenswrapper[4665]: E1205 01:27:32.988188 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fc63545-1530-48cd-a790-f36d5cd5d73c" containerName="pull"
Dec 05 01:27:32 crc kubenswrapper[4665]: I1205 01:27:32.988201 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fc63545-1530-48cd-a790-f36d5cd5d73c" containerName="pull"
Dec 05 01:27:32 crc kubenswrapper[4665]: E1205 01:27:32.988212 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fc63545-1530-48cd-a790-f36d5cd5d73c" containerName="util"
Dec 05 01:27:32 crc kubenswrapper[4665]: I1205 01:27:32.988218 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fc63545-1530-48cd-a790-f36d5cd5d73c" containerName="util"
Dec 05 01:27:32 crc kubenswrapper[4665]: E1205 01:27:32.988250 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3fc63545-1530-48cd-a790-f36d5cd5d73c" containerName="extract"
Dec 05 01:27:32 crc kubenswrapper[4665]: I1205 01:27:32.988257 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="3fc63545-1530-48cd-a790-f36d5cd5d73c" containerName="extract"
Dec 05 01:27:32 crc kubenswrapper[4665]: I1205 01:27:32.988584 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="3fc63545-1530-48cd-a790-f36d5cd5d73c" containerName="extract"
Dec 05 01:27:32 crc kubenswrapper[4665]: I1205 01:27:32.988967 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6f79d9dccc-zkrtn"
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6f79d9dccc-zkrtn" Dec 05 01:27:32 crc kubenswrapper[4665]: I1205 01:27:32.990897 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-m5225" Dec 05 01:27:33 crc kubenswrapper[4665]: I1205 01:27:33.079235 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6f79d9dccc-zkrtn"] Dec 05 01:27:33 crc kubenswrapper[4665]: I1205 01:27:33.089899 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p58gq\" (UniqueName: \"kubernetes.io/projected/119e3b58-5f63-441a-b2d9-9ea2e83df2b8-kube-api-access-p58gq\") pod \"openstack-operator-controller-operator-6f79d9dccc-zkrtn\" (UID: \"119e3b58-5f63-441a-b2d9-9ea2e83df2b8\") " pod="openstack-operators/openstack-operator-controller-operator-6f79d9dccc-zkrtn" Dec 05 01:27:33 crc kubenswrapper[4665]: I1205 01:27:33.191543 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p58gq\" (UniqueName: \"kubernetes.io/projected/119e3b58-5f63-441a-b2d9-9ea2e83df2b8-kube-api-access-p58gq\") pod \"openstack-operator-controller-operator-6f79d9dccc-zkrtn\" (UID: \"119e3b58-5f63-441a-b2d9-9ea2e83df2b8\") " pod="openstack-operators/openstack-operator-controller-operator-6f79d9dccc-zkrtn" Dec 05 01:27:33 crc kubenswrapper[4665]: I1205 01:27:33.218829 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p58gq\" (UniqueName: \"kubernetes.io/projected/119e3b58-5f63-441a-b2d9-9ea2e83df2b8-kube-api-access-p58gq\") pod \"openstack-operator-controller-operator-6f79d9dccc-zkrtn\" (UID: \"119e3b58-5f63-441a-b2d9-9ea2e83df2b8\") " pod="openstack-operators/openstack-operator-controller-operator-6f79d9dccc-zkrtn" Dec 05 01:27:33 crc kubenswrapper[4665]: I1205 01:27:33.304274 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-6f79d9dccc-zkrtn" Dec 05 01:27:33 crc kubenswrapper[4665]: I1205 01:27:33.560112 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-6f79d9dccc-zkrtn"] Dec 05 01:27:34 crc kubenswrapper[4665]: I1205 01:27:34.191969 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6f79d9dccc-zkrtn" event={"ID":"119e3b58-5f63-441a-b2d9-9ea2e83df2b8","Type":"ContainerStarted","Data":"8b7c7ef6c79ab9fe71b8819e483f9aed312bb2889fbf4feaa51cb6ece613d330"} Dec 05 01:27:39 crc kubenswrapper[4665]: I1205 01:27:39.218780 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-6f79d9dccc-zkrtn" event={"ID":"119e3b58-5f63-441a-b2d9-9ea2e83df2b8","Type":"ContainerStarted","Data":"6e4379a1a21933059957aafc8800a9ceb723b5479191e85313ecd4641c42bcdd"} Dec 05 01:27:39 crc kubenswrapper[4665]: I1205 01:27:39.219319 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-6f79d9dccc-zkrtn" Dec 05 01:27:39 crc kubenswrapper[4665]: I1205 01:27:39.251097 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-6f79d9dccc-zkrtn" podStartSLOduration=2.46941074 podStartE2EDuration="7.251075541s" podCreationTimestamp="2025-12-05 01:27:32 +0000 UTC" firstStartedPulling="2025-12-05 01:27:33.568914574 +0000 UTC m=+1028.908306873" lastFinishedPulling="2025-12-05 01:27:38.350579375 +0000 UTC m=+1033.689971674" observedRunningTime="2025-12-05 01:27:39.249029531 +0000 UTC m=+1034.588421840" watchObservedRunningTime="2025-12-05 01:27:39.251075541 +0000 UTC m=+1034.590467840" Dec 05 01:27:43 crc kubenswrapper[4665]: I1205 01:27:43.307242 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-6f79d9dccc-zkrtn" Dec 05 01:27:44 crc kubenswrapper[4665]: I1205 01:27:44.922833 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:27:44 crc kubenswrapper[4665]: I1205 01:27:44.922893 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:27:44 crc kubenswrapper[4665]: I1205 01:27:44.922944 4665 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 01:27:44 crc kubenswrapper[4665]: I1205 01:27:44.923626 4665 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1cd3d1505d7ab823a5b8b16aa7787ab1595f7aa23355a3b5bb9a7a6dd4cb7347"} pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 01:27:44 crc kubenswrapper[4665]: I1205 
01:27:44.923694 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" containerID="cri-o://1cd3d1505d7ab823a5b8b16aa7787ab1595f7aa23355a3b5bb9a7a6dd4cb7347" gracePeriod=600 Dec 05 01:27:45 crc kubenswrapper[4665]: I1205 01:27:45.259134 4665 generic.go:334] "Generic (PLEG): container finished" podID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerID="1cd3d1505d7ab823a5b8b16aa7787ab1595f7aa23355a3b5bb9a7a6dd4cb7347" exitCode=0 Dec 05 01:27:45 crc kubenswrapper[4665]: I1205 01:27:45.259208 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerDied","Data":"1cd3d1505d7ab823a5b8b16aa7787ab1595f7aa23355a3b5bb9a7a6dd4cb7347"} Dec 05 01:27:45 crc kubenswrapper[4665]: I1205 01:27:45.259468 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerStarted","Data":"ddea2cf7dfc7d76e1a9cf4f232382b2b597e0edaf17f47e1250c2d22c5805549"} Dec 05 01:27:45 crc kubenswrapper[4665]: I1205 01:27:45.259498 4665 scope.go:117] "RemoveContainer" containerID="61048277fbfd2d8678771fdadef582a23356aff6798430a3f97d91a5d469245e" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.770112 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-ptwdk"] Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.771601 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-ptwdk" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.773337 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-ts7km" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.779805 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-8hblt"] Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.780833 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-8hblt" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.787834 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-9m49l" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.789281 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-8hblt"] Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.793989 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-ptwdk"] Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.814331 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-6bsqf"] Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.815207 4665 util.go:30] "No sandbox for pod can be found. 
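The machine-config-daemon restart above is the liveness-probe path end to end: the kubelet GETs http://127.0.0.1:8798/health, records "connection refused" as a failure, and once the failure threshold is crossed kills the container (gracePeriod=600 here) and starts a replacement, as the ContainerDied/ContainerStarted pair at 01:27:45 shows. A minimal version of one such HTTP check (client settings illustrative; the endpoint is taken from the log):

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probe performs one HTTP liveness check: any status below 400 within
// the timeout counts as healthy; connection errors (like the
// "connection refused" above) and 4xx/5xx responses count as failures.
func probe(url string, timeout time.Duration) error {
	client := &http.Client{Timeout: timeout}
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. dial tcp 127.0.0.1:8798: connect: connection refused
	}
	defer resp.Body.Close()
	if resp.StatusCode >= 400 {
		return fmt.Errorf("unhealthy status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	err := probe("http://127.0.0.1:8798/health", time.Second)
	fmt.Println("liveness:", err) // a non-nil result feeds the failure counter
}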
Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-6bsqf" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.817763 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-q6xpr" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.840711 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qsdzr\" (UniqueName: \"kubernetes.io/projected/fa7a4766-c028-4114-b979-a7900e21103c-kube-api-access-qsdzr\") pod \"barbican-operator-controller-manager-7d9dfd778-8hblt\" (UID: \"fa7a4766-c028-4114-b979-a7900e21103c\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-8hblt" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.840803 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f69np\" (UniqueName: \"kubernetes.io/projected/669a406d-6e51-4ead-89ff-4a1df7cb7308-kube-api-access-f69np\") pod \"cinder-operator-controller-manager-859b6ccc6-ptwdk\" (UID: \"669a406d-6e51-4ead-89ff-4a1df7cb7308\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-ptwdk" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.840826 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wblxt\" (UniqueName: \"kubernetes.io/projected/c8e9557a-3433-413b-a5b2-9137f2b9c584-kube-api-access-wblxt\") pod \"designate-operator-controller-manager-78b4bc895b-6bsqf\" (UID: \"c8e9557a-3433-413b-a5b2-9137f2b9c584\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-6bsqf" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.848519 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-ks65z"] Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.849470 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-ks65z" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.851899 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-z7f4l" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.854192 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-6bsqf"] Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.884955 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-ks65z"] Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.902197 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dz2g4"] Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.903099 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dz2g4" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.904507 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-strqh" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.908575 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q6zxf"] Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.909623 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q6zxf" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.913575 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-tgj8z" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.927978 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dz2g4"] Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.942279 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-lt2sv"] Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.943480 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-lt2sv" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.943846 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qsdzr\" (UniqueName: \"kubernetes.io/projected/fa7a4766-c028-4114-b979-a7900e21103c-kube-api-access-qsdzr\") pod \"barbican-operator-controller-manager-7d9dfd778-8hblt\" (UID: \"fa7a4766-c028-4114-b979-a7900e21103c\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-8hblt" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.943893 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f69np\" (UniqueName: \"kubernetes.io/projected/669a406d-6e51-4ead-89ff-4a1df7cb7308-kube-api-access-f69np\") pod \"cinder-operator-controller-manager-859b6ccc6-ptwdk\" (UID: \"669a406d-6e51-4ead-89ff-4a1df7cb7308\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-ptwdk" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.943914 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wblxt\" (UniqueName: \"kubernetes.io/projected/c8e9557a-3433-413b-a5b2-9137f2b9c584-kube-api-access-wblxt\") pod \"designate-operator-controller-manager-78b4bc895b-6bsqf\" (UID: \"c8e9557a-3433-413b-a5b2-9137f2b9c584\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-6bsqf" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.943945 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b59bf\" (UniqueName: \"kubernetes.io/projected/8bd45d32-e93b-415b-a885-3926454418c9-kube-api-access-b59bf\") pod \"horizon-operator-controller-manager-68c6d99b8f-q6zxf\" (UID: \"8bd45d32-e93b-415b-a885-3926454418c9\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q6zxf" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.943968 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-8fpkn\" (UniqueName: \"kubernetes.io/projected/277c6945-0cbb-4a0a-8e22-e990d76da759-kube-api-access-8fpkn\") pod \"glance-operator-controller-manager-77987cd8cd-ks65z\" (UID: \"277c6945-0cbb-4a0a-8e22-e990d76da759\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-ks65z" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.944049 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fcf9h\" (UniqueName: \"kubernetes.io/projected/93b5a195-1f87-4eb3-8511-916e652b3913-kube-api-access-fcf9h\") pod \"heat-operator-controller-manager-5f64f6f8bb-dz2g4\" (UID: \"93b5a195-1f87-4eb3-8511-916e652b3913\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dz2g4" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.950985 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.951116 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-kjhhv" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.954407 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-lt2sv"] Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.967085 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q6zxf"] Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.975406 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wblxt\" (UniqueName: \"kubernetes.io/projected/c8e9557a-3433-413b-a5b2-9137f2b9c584-kube-api-access-wblxt\") pod \"designate-operator-controller-manager-78b4bc895b-6bsqf\" (UID: \"c8e9557a-3433-413b-a5b2-9137f2b9c584\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-6bsqf" Dec 05 01:28:02 crc kubenswrapper[4665]: I1205 01:28:02.979597 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-9pbl9"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.002966 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-9pbl9" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.032174 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-gms9b" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.046209 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fcf9h\" (UniqueName: \"kubernetes.io/projected/93b5a195-1f87-4eb3-8511-916e652b3913-kube-api-access-fcf9h\") pod \"heat-operator-controller-manager-5f64f6f8bb-dz2g4\" (UID: \"93b5a195-1f87-4eb3-8511-916e652b3913\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dz2g4" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.046261 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9g62f\" (UniqueName: \"kubernetes.io/projected/68e17c62-dc97-4bf6-b9de-340e03d5cbda-kube-api-access-9g62f\") pod \"infra-operator-controller-manager-57548d458d-lt2sv\" (UID: \"68e17c62-dc97-4bf6-b9de-340e03d5cbda\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-lt2sv" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.048468 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b59bf\" (UniqueName: \"kubernetes.io/projected/8bd45d32-e93b-415b-a885-3926454418c9-kube-api-access-b59bf\") pod \"horizon-operator-controller-manager-68c6d99b8f-q6zxf\" (UID: \"8bd45d32-e93b-415b-a885-3926454418c9\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q6zxf" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.048538 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8fpkn\" (UniqueName: \"kubernetes.io/projected/277c6945-0cbb-4a0a-8e22-e990d76da759-kube-api-access-8fpkn\") pod \"glance-operator-controller-manager-77987cd8cd-ks65z\" (UID: \"277c6945-0cbb-4a0a-8e22-e990d76da759\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-ks65z" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.048578 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/68e17c62-dc97-4bf6-b9de-340e03d5cbda-cert\") pod \"infra-operator-controller-manager-57548d458d-lt2sv\" (UID: \"68e17c62-dc97-4bf6-b9de-340e03d5cbda\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-lt2sv" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.048658 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnzsg\" (UniqueName: \"kubernetes.io/projected/3a85872e-618d-4847-aae0-1eb366f16003-kube-api-access-wnzsg\") pod \"ironic-operator-controller-manager-6c548fd776-9pbl9\" (UID: \"3a85872e-618d-4847-aae0-1eb366f16003\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-9pbl9" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.050584 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qsdzr\" (UniqueName: \"kubernetes.io/projected/fa7a4766-c028-4114-b979-a7900e21103c-kube-api-access-qsdzr\") pod \"barbican-operator-controller-manager-7d9dfd778-8hblt\" (UID: \"fa7a4766-c028-4114-b979-a7900e21103c\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-8hblt" Dec 05 01:28:03 crc 
kubenswrapper[4665]: I1205 01:28:03.063913 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f69np\" (UniqueName: \"kubernetes.io/projected/669a406d-6e51-4ead-89ff-4a1df7cb7308-kube-api-access-f69np\") pod \"cinder-operator-controller-manager-859b6ccc6-ptwdk\" (UID: \"669a406d-6e51-4ead-89ff-4a1df7cb7308\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-ptwdk" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.063972 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-9pbl9"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.070879 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-dxk89"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.072386 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-dxk89" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.078134 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-nfmtb"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.079005 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-x4xxj" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.079091 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nfmtb" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.082516 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-nfmtb"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.090775 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5qmj8"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.092333 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5qmj8" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.099732 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-499m4" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.100149 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-hw4fs" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.101616 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-ptwdk" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.109610 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8fpkn\" (UniqueName: \"kubernetes.io/projected/277c6945-0cbb-4a0a-8e22-e990d76da759-kube-api-access-8fpkn\") pod \"glance-operator-controller-manager-77987cd8cd-ks65z\" (UID: \"277c6945-0cbb-4a0a-8e22-e990d76da759\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-ks65z" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.104616 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-8hblt" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.128771 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-dxk89"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.138082 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-6bsqf" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.150589 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9g62f\" (UniqueName: \"kubernetes.io/projected/68e17c62-dc97-4bf6-b9de-340e03d5cbda-kube-api-access-9g62f\") pod \"infra-operator-controller-manager-57548d458d-lt2sv\" (UID: \"68e17c62-dc97-4bf6-b9de-340e03d5cbda\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-lt2sv" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.150964 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/68e17c62-dc97-4bf6-b9de-340e03d5cbda-cert\") pod \"infra-operator-controller-manager-57548d458d-lt2sv\" (UID: \"68e17c62-dc97-4bf6-b9de-340e03d5cbda\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-lt2sv" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.151083 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftzbm\" (UniqueName: \"kubernetes.io/projected/e7235695-da27-4f1d-afec-a6f2a3decc79-kube-api-access-ftzbm\") pod \"mariadb-operator-controller-manager-56bbcc9d85-5qmj8\" (UID: \"e7235695-da27-4f1d-afec-a6f2a3decc79\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5qmj8" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.151198 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mkqq6\" (UniqueName: \"kubernetes.io/projected/5c1d9b52-7fcd-4615-9faa-af55e4165ffb-kube-api-access-mkqq6\") pod \"keystone-operator-controller-manager-7765d96ddf-nfmtb\" (UID: \"5c1d9b52-7fcd-4615-9faa-af55e4165ffb\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nfmtb" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.151352 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnzsg\" (UniqueName: \"kubernetes.io/projected/3a85872e-618d-4847-aae0-1eb366f16003-kube-api-access-wnzsg\") pod \"ironic-operator-controller-manager-6c548fd776-9pbl9\" (UID: \"3a85872e-618d-4847-aae0-1eb366f16003\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-9pbl9" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.151502 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t6pgn\" (UniqueName: \"kubernetes.io/projected/1fd940e7-8adc-4859-a763-58d909016fd6-kube-api-access-t6pgn\") pod \"manila-operator-controller-manager-7c79b5df47-dxk89\" (UID: \"1fd940e7-8adc-4859-a763-58d909016fd6\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-dxk89" Dec 05 01:28:03 crc kubenswrapper[4665]: E1205 01:28:03.152326 4665 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 01:28:03 crc kubenswrapper[4665]: 
E1205 01:28:03.152491 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/68e17c62-dc97-4bf6-b9de-340e03d5cbda-cert podName:68e17c62-dc97-4bf6-b9de-340e03d5cbda nodeName:}" failed. No retries permitted until 2025-12-05 01:28:03.652470117 +0000 UTC m=+1058.991862416 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/68e17c62-dc97-4bf6-b9de-340e03d5cbda-cert") pod "infra-operator-controller-manager-57548d458d-lt2sv" (UID: "68e17c62-dc97-4bf6-b9de-340e03d5cbda") : secret "infra-operator-webhook-server-cert" not found Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.162988 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5qmj8"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.168136 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-ks65z" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.185053 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b59bf\" (UniqueName: \"kubernetes.io/projected/8bd45d32-e93b-415b-a885-3926454418c9-kube-api-access-b59bf\") pod \"horizon-operator-controller-manager-68c6d99b8f-q6zxf\" (UID: \"8bd45d32-e93b-415b-a885-3926454418c9\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q6zxf" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.202080 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-fbmtc"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.205078 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-fbmtc" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.206172 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fcf9h\" (UniqueName: \"kubernetes.io/projected/93b5a195-1f87-4eb3-8511-916e652b3913-kube-api-access-fcf9h\") pod \"heat-operator-controller-manager-5f64f6f8bb-dz2g4\" (UID: \"93b5a195-1f87-4eb3-8511-916e652b3913\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dz2g4" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.220639 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-472gn" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.231038 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnzsg\" (UniqueName: \"kubernetes.io/projected/3a85872e-618d-4847-aae0-1eb366f16003-kube-api-access-wnzsg\") pod \"ironic-operator-controller-manager-6c548fd776-9pbl9\" (UID: \"3a85872e-618d-4847-aae0-1eb366f16003\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-9pbl9" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.231322 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dz2g4" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.235071 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9g62f\" (UniqueName: \"kubernetes.io/projected/68e17c62-dc97-4bf6-b9de-340e03d5cbda-kube-api-access-9g62f\") pod \"infra-operator-controller-manager-57548d458d-lt2sv\" (UID: \"68e17c62-dc97-4bf6-b9de-340e03d5cbda\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-lt2sv" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.242258 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q6zxf" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.255816 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftzbm\" (UniqueName: \"kubernetes.io/projected/e7235695-da27-4f1d-afec-a6f2a3decc79-kube-api-access-ftzbm\") pod \"mariadb-operator-controller-manager-56bbcc9d85-5qmj8\" (UID: \"e7235695-da27-4f1d-afec-a6f2a3decc79\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5qmj8" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.255889 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mkqq6\" (UniqueName: \"kubernetes.io/projected/5c1d9b52-7fcd-4615-9faa-af55e4165ffb-kube-api-access-mkqq6\") pod \"keystone-operator-controller-manager-7765d96ddf-nfmtb\" (UID: \"5c1d9b52-7fcd-4615-9faa-af55e4165ffb\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nfmtb" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.255935 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t6pgn\" (UniqueName: \"kubernetes.io/projected/1fd940e7-8adc-4859-a763-58d909016fd6-kube-api-access-t6pgn\") pod \"manila-operator-controller-manager-7c79b5df47-dxk89\" (UID: \"1fd940e7-8adc-4859-a763-58d909016fd6\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-dxk89" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.282927 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-fbmtc"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.302822 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mkqq6\" (UniqueName: \"kubernetes.io/projected/5c1d9b52-7fcd-4615-9faa-af55e4165ffb-kube-api-access-mkqq6\") pod \"keystone-operator-controller-manager-7765d96ddf-nfmtb\" (UID: \"5c1d9b52-7fcd-4615-9faa-af55e4165ffb\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nfmtb" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.316287 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftzbm\" (UniqueName: \"kubernetes.io/projected/e7235695-da27-4f1d-afec-a6f2a3decc79-kube-api-access-ftzbm\") pod \"mariadb-operator-controller-manager-56bbcc9d85-5qmj8\" (UID: \"e7235695-da27-4f1d-afec-a6f2a3decc79\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5qmj8" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.331443 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t6pgn\" (UniqueName: \"kubernetes.io/projected/1fd940e7-8adc-4859-a763-58d909016fd6-kube-api-access-t6pgn\") pod 
\"manila-operator-controller-manager-7c79b5df47-dxk89\" (UID: \"1fd940e7-8adc-4859-a763-58d909016fd6\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-dxk89" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.360111 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-fgjvz"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.361219 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-fgjvz" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.363732 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bt46x\" (UniqueName: \"kubernetes.io/projected/8d94649e-ea57-4b1a-9fb0-2b37b567cd77-kube-api-access-bt46x\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-fbmtc\" (UID: \"8d94649e-ea57-4b1a-9fb0-2b37b567cd77\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-fbmtc" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.363907 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-fgjvz"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.364392 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-9pbl9" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.391171 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-wkxv5" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.400059 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-dnqxs"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.401424 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-dnqxs" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.410497 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-nbbcb" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.427128 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.428527 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.447143 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-g8vvl" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.447490 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.465899 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bt46x\" (UniqueName: \"kubernetes.io/projected/8d94649e-ea57-4b1a-9fb0-2b37b567cd77-kube-api-access-bt46x\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-fbmtc\" (UID: \"8d94649e-ea57-4b1a-9fb0-2b37b567cd77\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-fbmtc" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.465934 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9hgk\" (UniqueName: \"kubernetes.io/projected/fb8b497b-5207-408c-9e30-e7169c4ccede-kube-api-access-l9hgk\") pod \"nova-operator-controller-manager-697bc559fc-fgjvz\" (UID: \"fb8b497b-5207-408c-9e30-e7169c4ccede\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-fgjvz" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.471358 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-nmpk2"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.472357 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-nmpk2" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.501855 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-kwcdj" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.502591 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-dxk89" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.517043 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bt46x\" (UniqueName: \"kubernetes.io/projected/8d94649e-ea57-4b1a-9fb0-2b37b567cd77-kube-api-access-bt46x\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-fbmtc\" (UID: \"8d94649e-ea57-4b1a-9fb0-2b37b567cd77\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-fbmtc" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.535451 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-dnqxs"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.537204 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nfmtb" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.560851 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-fstjd"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.562723 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-fstjd" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.566739 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/77dd89d3-29be-4d9c-ad35-a6281d6bd57f-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9\" (UID: \"77dd89d3-29be-4d9c-ad35-a6281d6bd57f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.566819 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85m9b\" (UniqueName: \"kubernetes.io/projected/77dd89d3-29be-4d9c-ad35-a6281d6bd57f-kube-api-access-85m9b\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9\" (UID: \"77dd89d3-29be-4d9c-ad35-a6281d6bd57f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.566842 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hnvfm\" (UniqueName: \"kubernetes.io/projected/764119ea-4228-4141-a7a7-faee0be8d052-kube-api-access-hnvfm\") pod \"octavia-operator-controller-manager-998648c74-dnqxs\" (UID: \"764119ea-4228-4141-a7a7-faee0be8d052\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-dnqxs" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.566866 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9hgk\" (UniqueName: \"kubernetes.io/projected/fb8b497b-5207-408c-9e30-e7169c4ccede-kube-api-access-l9hgk\") pod \"nova-operator-controller-manager-697bc559fc-fgjvz\" (UID: \"fb8b497b-5207-408c-9e30-e7169c4ccede\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-fgjvz" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.575064 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5qmj8" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.575766 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-xt29w" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.577597 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.602376 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-nmpk2"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.607704 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-fbmtc" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.622947 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9hgk\" (UniqueName: \"kubernetes.io/projected/fb8b497b-5207-408c-9e30-e7169c4ccede-kube-api-access-l9hgk\") pod \"nova-operator-controller-manager-697bc559fc-fgjvz\" (UID: \"fb8b497b-5207-408c-9e30-e7169c4ccede\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-fgjvz" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.631850 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-fstjd"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.659646 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-dlncm"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.660799 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-dlncm" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.670218 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-rfnc5" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.679626 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/68e17c62-dc97-4bf6-b9de-340e03d5cbda-cert\") pod \"infra-operator-controller-manager-57548d458d-lt2sv\" (UID: \"68e17c62-dc97-4bf6-b9de-340e03d5cbda\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-lt2sv" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.679703 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85m9b\" (UniqueName: \"kubernetes.io/projected/77dd89d3-29be-4d9c-ad35-a6281d6bd57f-kube-api-access-85m9b\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9\" (UID: \"77dd89d3-29be-4d9c-ad35-a6281d6bd57f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.679733 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hnvfm\" (UniqueName: \"kubernetes.io/projected/764119ea-4228-4141-a7a7-faee0be8d052-kube-api-access-hnvfm\") pod \"octavia-operator-controller-manager-998648c74-dnqxs\" (UID: \"764119ea-4228-4141-a7a7-faee0be8d052\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-dnqxs" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.679759 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bxmnq\" (UniqueName: \"kubernetes.io/projected/3933c0fc-ca36-42a1-b418-9db281576617-kube-api-access-bxmnq\") pod \"swift-operator-controller-manager-5f8c65bbfc-dlncm\" (UID: \"3933c0fc-ca36-42a1-b418-9db281576617\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-dlncm" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.679806 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqdvh\" (UniqueName: \"kubernetes.io/projected/c874c72f-9ac0-4ce5-bf5c-fc9e983b725c-kube-api-access-kqdvh\") pod \"ovn-operator-controller-manager-b6456fdb6-nmpk2\" (UID: 
\"c874c72f-9ac0-4ce5-bf5c-fc9e983b725c\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-nmpk2" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.679842 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2nn4x\" (UniqueName: \"kubernetes.io/projected/8543ed45-b6f4-4f54-bc94-756bf6f031e6-kube-api-access-2nn4x\") pod \"placement-operator-controller-manager-78f8948974-fstjd\" (UID: \"8543ed45-b6f4-4f54-bc94-756bf6f031e6\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-fstjd" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.679884 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/77dd89d3-29be-4d9c-ad35-a6281d6bd57f-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9\" (UID: \"77dd89d3-29be-4d9c-ad35-a6281d6bd57f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9" Dec 05 01:28:03 crc kubenswrapper[4665]: E1205 01:28:03.680017 4665 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 01:28:03 crc kubenswrapper[4665]: E1205 01:28:03.680080 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/77dd89d3-29be-4d9c-ad35-a6281d6bd57f-cert podName:77dd89d3-29be-4d9c-ad35-a6281d6bd57f nodeName:}" failed. No retries permitted until 2025-12-05 01:28:04.180059634 +0000 UTC m=+1059.519451943 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/77dd89d3-29be-4d9c-ad35-a6281d6bd57f-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9" (UID: "77dd89d3-29be-4d9c-ad35-a6281d6bd57f") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 01:28:03 crc kubenswrapper[4665]: E1205 01:28:03.680354 4665 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 01:28:03 crc kubenswrapper[4665]: E1205 01:28:03.680403 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/68e17c62-dc97-4bf6-b9de-340e03d5cbda-cert podName:68e17c62-dc97-4bf6-b9de-340e03d5cbda nodeName:}" failed. No retries permitted until 2025-12-05 01:28:04.680392491 +0000 UTC m=+1060.019784790 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/68e17c62-dc97-4bf6-b9de-340e03d5cbda-cert") pod "infra-operator-controller-manager-57548d458d-lt2sv" (UID: "68e17c62-dc97-4bf6-b9de-340e03d5cbda") : secret "infra-operator-webhook-server-cert" not found Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.741306 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hnvfm\" (UniqueName: \"kubernetes.io/projected/764119ea-4228-4141-a7a7-faee0be8d052-kube-api-access-hnvfm\") pod \"octavia-operator-controller-manager-998648c74-dnqxs\" (UID: \"764119ea-4228-4141-a7a7-faee0be8d052\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-dnqxs" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.741953 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-fgjvz" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.791329 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqdvh\" (UniqueName: \"kubernetes.io/projected/c874c72f-9ac0-4ce5-bf5c-fc9e983b725c-kube-api-access-kqdvh\") pod \"ovn-operator-controller-manager-b6456fdb6-nmpk2\" (UID: \"c874c72f-9ac0-4ce5-bf5c-fc9e983b725c\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-nmpk2" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.791435 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2nn4x\" (UniqueName: \"kubernetes.io/projected/8543ed45-b6f4-4f54-bc94-756bf6f031e6-kube-api-access-2nn4x\") pod \"placement-operator-controller-manager-78f8948974-fstjd\" (UID: \"8543ed45-b6f4-4f54-bc94-756bf6f031e6\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-fstjd" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.791610 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bxmnq\" (UniqueName: \"kubernetes.io/projected/3933c0fc-ca36-42a1-b418-9db281576617-kube-api-access-bxmnq\") pod \"swift-operator-controller-manager-5f8c65bbfc-dlncm\" (UID: \"3933c0fc-ca36-42a1-b418-9db281576617\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-dlncm" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.804353 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-6dh9j"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.812005 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-dnqxs" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.821903 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-6dh9j" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.828264 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-kqmgv" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.844905 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85m9b\" (UniqueName: \"kubernetes.io/projected/77dd89d3-29be-4d9c-ad35-a6281d6bd57f-kube-api-access-85m9b\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9\" (UID: \"77dd89d3-29be-4d9c-ad35-a6281d6bd57f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.877315 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqdvh\" (UniqueName: \"kubernetes.io/projected/c874c72f-9ac0-4ce5-bf5c-fc9e983b725c-kube-api-access-kqdvh\") pod \"ovn-operator-controller-manager-b6456fdb6-nmpk2\" (UID: \"c874c72f-9ac0-4ce5-bf5c-fc9e983b725c\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-nmpk2" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.915819 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-6dh9j"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.916016 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bxmnq\" (UniqueName: \"kubernetes.io/projected/3933c0fc-ca36-42a1-b418-9db281576617-kube-api-access-bxmnq\") pod \"swift-operator-controller-manager-5f8c65bbfc-dlncm\" (UID: \"3933c0fc-ca36-42a1-b418-9db281576617\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-dlncm" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.931994 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2nn4x\" (UniqueName: \"kubernetes.io/projected/8543ed45-b6f4-4f54-bc94-756bf6f031e6-kube-api-access-2nn4x\") pod \"placement-operator-controller-manager-78f8948974-fstjd\" (UID: \"8543ed45-b6f4-4f54-bc94-756bf6f031e6\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-fstjd" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.937695 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-dlncm"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.938279 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-nmpk2" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.976232 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-bk8hv"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.978193 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-bk8hv" Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.987784 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-sbnx4"] Dec 05 01:28:03 crc kubenswrapper[4665]: I1205 01:28:03.988879 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-sbnx4" Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.001842 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-bk8hv"] Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.002619 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-955zv" Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.013165 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-fstjd" Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.019113 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-mbhwr" Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.020886 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gl8nz\" (UniqueName: \"kubernetes.io/projected/f353bd63-e193-4a26-8ba6-32f1eec034a8-kube-api-access-gl8nz\") pod \"telemetry-operator-controller-manager-76cc84c6bb-6dh9j\" (UID: \"f353bd63-e193-4a26-8ba6-32f1eec034a8\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-6dh9j" Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.033391 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-sbnx4"] Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.066989 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-dlncm" Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.097283 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4"] Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.098330 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4" Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.104016 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.104196 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.104234 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-6677d" Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.117381 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4"] Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.123755 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gl8nz\" (UniqueName: \"kubernetes.io/projected/f353bd63-e193-4a26-8ba6-32f1eec034a8-kube-api-access-gl8nz\") pod \"telemetry-operator-controller-manager-76cc84c6bb-6dh9j\" (UID: \"f353bd63-e193-4a26-8ba6-32f1eec034a8\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-6dh9j" Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.123802 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xj59t\" (UniqueName: \"kubernetes.io/projected/5009fa32-5a01-46dc-9238-2f3c8ef7fddc-kube-api-access-xj59t\") pod \"watcher-operator-controller-manager-769dc69bc-sbnx4\" (UID: \"5009fa32-5a01-46dc-9238-2f3c8ef7fddc\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-sbnx4" Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.123887 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xlg56\" (UniqueName: \"kubernetes.io/projected/0c289bf9-999d-4396-b15b-b27fded35180-kube-api-access-xlg56\") pod \"test-operator-controller-manager-5854674fcc-bk8hv\" (UID: \"0c289bf9-999d-4396-b15b-b27fded35180\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-bk8hv" Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.194625 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dmtqm"] Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.195445 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dmtqm" Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.201078 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gl8nz\" (UniqueName: \"kubernetes.io/projected/f353bd63-e193-4a26-8ba6-32f1eec034a8-kube-api-access-gl8nz\") pod \"telemetry-operator-controller-manager-76cc84c6bb-6dh9j\" (UID: \"f353bd63-e193-4a26-8ba6-32f1eec034a8\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-6dh9j" Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.221031 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-92xfj" Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.221713 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-6dh9j" Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.228932 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dmtqm"] Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.234748 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xl4r\" (UniqueName: \"kubernetes.io/projected/7bafbafa-4235-456c-a2b3-7990ad3f14e2-kube-api-access-5xl4r\") pod \"rabbitmq-cluster-operator-manager-668c99d594-dmtqm\" (UID: \"7bafbafa-4235-456c-a2b3-7990ad3f14e2\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dmtqm" Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.234800 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-webhook-certs\") pod \"openstack-operator-controller-manager-79966545b7-vd7h4\" (UID: \"afada5e1-db62-40f7-b5a9-1c36f42670d4\") " pod="openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4" Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.234845 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xlg56\" (UniqueName: \"kubernetes.io/projected/0c289bf9-999d-4396-b15b-b27fded35180-kube-api-access-xlg56\") pod \"test-operator-controller-manager-5854674fcc-bk8hv\" (UID: \"0c289bf9-999d-4396-b15b-b27fded35180\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-bk8hv" Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.234862 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-metrics-certs\") pod \"openstack-operator-controller-manager-79966545b7-vd7h4\" (UID: \"afada5e1-db62-40f7-b5a9-1c36f42670d4\") " pod="openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4" Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.234926 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/77dd89d3-29be-4d9c-ad35-a6281d6bd57f-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9\" (UID: \"77dd89d3-29be-4d9c-ad35-a6281d6bd57f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9" Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.234950 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xj59t\" (UniqueName: \"kubernetes.io/projected/5009fa32-5a01-46dc-9238-2f3c8ef7fddc-kube-api-access-xj59t\") pod \"watcher-operator-controller-manager-769dc69bc-sbnx4\" (UID: \"5009fa32-5a01-46dc-9238-2f3c8ef7fddc\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-sbnx4" Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.235015 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2q7ct\" (UniqueName: \"kubernetes.io/projected/afada5e1-db62-40f7-b5a9-1c36f42670d4-kube-api-access-2q7ct\") pod \"openstack-operator-controller-manager-79966545b7-vd7h4\" (UID: \"afada5e1-db62-40f7-b5a9-1c36f42670d4\") " pod="openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4" 
Dec 05 01:28:04 crc kubenswrapper[4665]: E1205 01:28:04.235435 4665 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 01:28:04 crc kubenswrapper[4665]: E1205 01:28:04.235471 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/77dd89d3-29be-4d9c-ad35-a6281d6bd57f-cert podName:77dd89d3-29be-4d9c-ad35-a6281d6bd57f nodeName:}" failed. No retries permitted until 2025-12-05 01:28:05.235457559 +0000 UTC m=+1060.574849858 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/77dd89d3-29be-4d9c-ad35-a6281d6bd57f-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9" (UID: "77dd89d3-29be-4d9c-ad35-a6281d6bd57f") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.265015 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xj59t\" (UniqueName: \"kubernetes.io/projected/5009fa32-5a01-46dc-9238-2f3c8ef7fddc-kube-api-access-xj59t\") pod \"watcher-operator-controller-manager-769dc69bc-sbnx4\" (UID: \"5009fa32-5a01-46dc-9238-2f3c8ef7fddc\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-sbnx4"
Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.334119 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xlg56\" (UniqueName: \"kubernetes.io/projected/0c289bf9-999d-4396-b15b-b27fded35180-kube-api-access-xlg56\") pod \"test-operator-controller-manager-5854674fcc-bk8hv\" (UID: \"0c289bf9-999d-4396-b15b-b27fded35180\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-bk8hv"
Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.339928 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2q7ct\" (UniqueName: \"kubernetes.io/projected/afada5e1-db62-40f7-b5a9-1c36f42670d4-kube-api-access-2q7ct\") pod \"openstack-operator-controller-manager-79966545b7-vd7h4\" (UID: \"afada5e1-db62-40f7-b5a9-1c36f42670d4\") " pod="openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4"
Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.339998 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xl4r\" (UniqueName: \"kubernetes.io/projected/7bafbafa-4235-456c-a2b3-7990ad3f14e2-kube-api-access-5xl4r\") pod \"rabbitmq-cluster-operator-manager-668c99d594-dmtqm\" (UID: \"7bafbafa-4235-456c-a2b3-7990ad3f14e2\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dmtqm"
Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.340026 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-webhook-certs\") pod \"openstack-operator-controller-manager-79966545b7-vd7h4\" (UID: \"afada5e1-db62-40f7-b5a9-1c36f42670d4\") " pod="openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4"
Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.340053 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-metrics-certs\") pod \"openstack-operator-controller-manager-79966545b7-vd7h4\" (UID: \"afada5e1-db62-40f7-b5a9-1c36f42670d4\") " pod="openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4"
Dec 05 01:28:04 crc kubenswrapper[4665]: E1205 01:28:04.340199 4665 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 05 01:28:04 crc kubenswrapper[4665]: E1205 01:28:04.340247 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-metrics-certs podName:afada5e1-db62-40f7-b5a9-1c36f42670d4 nodeName:}" failed. No retries permitted until 2025-12-05 01:28:04.840230978 +0000 UTC m=+1060.179623277 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-metrics-certs") pod "openstack-operator-controller-manager-79966545b7-vd7h4" (UID: "afada5e1-db62-40f7-b5a9-1c36f42670d4") : secret "metrics-server-cert" not found
Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.343116 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-sbnx4"
Dec 05 01:28:04 crc kubenswrapper[4665]: E1205 01:28:04.343573 4665 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 05 01:28:04 crc kubenswrapper[4665]: E1205 01:28:04.343614 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-webhook-certs podName:afada5e1-db62-40f7-b5a9-1c36f42670d4 nodeName:}" failed. No retries permitted until 2025-12-05 01:28:04.843597459 +0000 UTC m=+1060.182989838 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-webhook-certs") pod "openstack-operator-controller-manager-79966545b7-vd7h4" (UID: "afada5e1-db62-40f7-b5a9-1c36f42670d4") : secret "webhook-server-cert" not found
Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.393080 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xl4r\" (UniqueName: \"kubernetes.io/projected/7bafbafa-4235-456c-a2b3-7990ad3f14e2-kube-api-access-5xl4r\") pod \"rabbitmq-cluster-operator-manager-668c99d594-dmtqm\" (UID: \"7bafbafa-4235-456c-a2b3-7990ad3f14e2\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dmtqm"
Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.397183 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2q7ct\" (UniqueName: \"kubernetes.io/projected/afada5e1-db62-40f7-b5a9-1c36f42670d4-kube-api-access-2q7ct\") pod \"openstack-operator-controller-manager-79966545b7-vd7h4\" (UID: \"afada5e1-db62-40f7-b5a9-1c36f42670d4\") " pod="openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4"
Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.508107 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dmtqm"
Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.569524 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-bk8hv"
Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.695279 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-ks65z"]
Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.723584 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-6bsqf"]
Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.759287 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/68e17c62-dc97-4bf6-b9de-340e03d5cbda-cert\") pod \"infra-operator-controller-manager-57548d458d-lt2sv\" (UID: \"68e17c62-dc97-4bf6-b9de-340e03d5cbda\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-lt2sv"
Dec 05 01:28:04 crc kubenswrapper[4665]: E1205 01:28:04.759467 4665 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 05 01:28:04 crc kubenswrapper[4665]: E1205 01:28:04.759515 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/68e17c62-dc97-4bf6-b9de-340e03d5cbda-cert podName:68e17c62-dc97-4bf6-b9de-340e03d5cbda nodeName:}" failed. No retries permitted until 2025-12-05 01:28:06.759500649 +0000 UTC m=+1062.098892948 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/68e17c62-dc97-4bf6-b9de-340e03d5cbda-cert") pod "infra-operator-controller-manager-57548d458d-lt2sv" (UID: "68e17c62-dc97-4bf6-b9de-340e03d5cbda") : secret "infra-operator-webhook-server-cert" not found
Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.860938 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-webhook-certs\") pod \"openstack-operator-controller-manager-79966545b7-vd7h4\" (UID: \"afada5e1-db62-40f7-b5a9-1c36f42670d4\") " pod="openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4"
Dec 05 01:28:04 crc kubenswrapper[4665]: I1205 01:28:04.860991 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-metrics-certs\") pod \"openstack-operator-controller-manager-79966545b7-vd7h4\" (UID: \"afada5e1-db62-40f7-b5a9-1c36f42670d4\") " pod="openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4"
Dec 05 01:28:04 crc kubenswrapper[4665]: E1205 01:28:04.861146 4665 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 05 01:28:04 crc kubenswrapper[4665]: E1205 01:28:04.861197 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-metrics-certs podName:afada5e1-db62-40f7-b5a9-1c36f42670d4 nodeName:}" failed. No retries permitted until 2025-12-05 01:28:05.861180244 +0000 UTC m=+1061.200572543 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-metrics-certs") pod "openstack-operator-controller-manager-79966545b7-vd7h4" (UID: "afada5e1-db62-40f7-b5a9-1c36f42670d4") : secret "metrics-server-cert" not found
Dec 05 01:28:04 crc kubenswrapper[4665]: E1205 01:28:04.861564 4665 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 05 01:28:04 crc kubenswrapper[4665]: E1205 01:28:04.861587 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-webhook-certs podName:afada5e1-db62-40f7-b5a9-1c36f42670d4 nodeName:}" failed. No retries permitted until 2025-12-05 01:28:05.861579794 +0000 UTC m=+1061.200972093 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-webhook-certs") pod "openstack-operator-controller-manager-79966545b7-vd7h4" (UID: "afada5e1-db62-40f7-b5a9-1c36f42670d4") : secret "webhook-server-cert" not found
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.048565 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-9pbl9"]
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.272906 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/77dd89d3-29be-4d9c-ad35-a6281d6bd57f-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9\" (UID: \"77dd89d3-29be-4d9c-ad35-a6281d6bd57f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9"
Dec 05 01:28:05 crc kubenswrapper[4665]: E1205 01:28:05.273060 4665 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 01:28:05 crc kubenswrapper[4665]: E1205 01:28:05.273209 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/77dd89d3-29be-4d9c-ad35-a6281d6bd57f-cert podName:77dd89d3-29be-4d9c-ad35-a6281d6bd57f nodeName:}" failed. No retries permitted until 2025-12-05 01:28:07.273190731 +0000 UTC m=+1062.612583030 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/77dd89d3-29be-4d9c-ad35-a6281d6bd57f-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9" (UID: "77dd89d3-29be-4d9c-ad35-a6281d6bd57f") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.274996 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-dxk89"]
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.292814 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dz2g4"]
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.338738 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5qmj8"]
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.357316 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q6zxf"]
Dec 05 01:28:05 crc kubenswrapper[4665]: W1205 01:28:05.386737 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8bd45d32_e93b_415b_a885_3926454418c9.slice/crio-dbf40c5a0302a29e44064e7ddbef870a7597ac5592953c75ff7c30eeae16e190 WatchSource:0}: Error finding container dbf40c5a0302a29e44064e7ddbef870a7597ac5592953c75ff7c30eeae16e190: Status 404 returned error can't find the container with id dbf40c5a0302a29e44064e7ddbef870a7597ac5592953c75ff7c30eeae16e190
Dec 05 01:28:05 crc kubenswrapper[4665]: W1205 01:28:05.388193 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfa7a4766_c028_4114_b979_a7900e21103c.slice/crio-f34960a9ea795fb4e2b44101e2ee67578c55bf3c7a7f463d7c41beade888da21 WatchSource:0}: Error finding container f34960a9ea795fb4e2b44101e2ee67578c55bf3c7a7f463d7c41beade888da21: Status 404 returned error can't find the container with id f34960a9ea795fb4e2b44101e2ee67578c55bf3c7a7f463d7c41beade888da21
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.392210 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-8hblt"]
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.459482 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-fgjvz"]
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.462902 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dz2g4" event={"ID":"93b5a195-1f87-4eb3-8511-916e652b3913","Type":"ContainerStarted","Data":"1c99a6c8447673e8d293c5f6670b1505ea0cafa1a505dc1cc61c17283d33aaf8"}
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.464246 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-ptwdk"]
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.464827 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-dxk89" event={"ID":"1fd940e7-8adc-4859-a763-58d909016fd6","Type":"ContainerStarted","Data":"aba0c84e2daa8a4a7f812ce06c58c1ec864f9b532206768336b9de1fe26dc755"}
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.466033 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-8hblt" event={"ID":"fa7a4766-c028-4114-b979-a7900e21103c","Type":"ContainerStarted","Data":"f34960a9ea795fb4e2b44101e2ee67578c55bf3c7a7f463d7c41beade888da21"}
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.466983 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5qmj8" event={"ID":"e7235695-da27-4f1d-afec-a6f2a3decc79","Type":"ContainerStarted","Data":"9fa90dfb435710be285b56b4cae164cc01b7f1db5931d3b2050478ff43823c1f"}
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.468319 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-9pbl9" event={"ID":"3a85872e-618d-4847-aae0-1eb366f16003","Type":"ContainerStarted","Data":"79c66eb90967df33e7e0da3d333b943a3b754b6a632dab7c3d302300700b336f"}
Dec 05 01:28:05 crc kubenswrapper[4665]: W1205 01:28:05.468931 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfb8b497b_5207_408c_9e30_e7169c4ccede.slice/crio-b84b326133131c8d7a38ae47c338866b16fb296525d2dc35bb574a688975740b WatchSource:0}: Error finding container b84b326133131c8d7a38ae47c338866b16fb296525d2dc35bb574a688975740b: Status 404 returned error can't find the container with id b84b326133131c8d7a38ae47c338866b16fb296525d2dc35bb574a688975740b
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.470760 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q6zxf" event={"ID":"8bd45d32-e93b-415b-a885-3926454418c9","Type":"ContainerStarted","Data":"dbf40c5a0302a29e44064e7ddbef870a7597ac5592953c75ff7c30eeae16e190"}
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.472162 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-ks65z" event={"ID":"277c6945-0cbb-4a0a-8e22-e990d76da759","Type":"ContainerStarted","Data":"4a02a4fbaf9fe7524dd908cba4de10948d0791139aabbf27177bee4b1722cb24"}
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.474544 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-6bsqf" event={"ID":"c8e9557a-3433-413b-a5b2-9137f2b9c584","Type":"ContainerStarted","Data":"c1b3d8fe806004ceadc9652caeee4a73f8064d8412e3012b1f5c8d4db0281a8c"}
Dec 05 01:28:05 crc kubenswrapper[4665]: W1205 01:28:05.480376 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod669a406d_6e51_4ead_89ff_4a1df7cb7308.slice/crio-6d74b7efa722a7e1163320badc5534c2eeffab87aa69378026f212622887e67a WatchSource:0}: Error finding container 6d74b7efa722a7e1163320badc5534c2eeffab87aa69378026f212622887e67a: Status 404 returned error can't find the container with id 6d74b7efa722a7e1163320badc5534c2eeffab87aa69378026f212622887e67a
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.613377 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-dnqxs"]
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.636476 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-fbmtc"]
Dec 05 01:28:05 crc kubenswrapper[4665]: W1205 01:28:05.638793 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod764119ea_4228_4141_a7a7_faee0be8d052.slice/crio-11d3d4aa2b567c1c731b3ccda816a60f868e652fb2f984fbb12d75feb3b3ac41 WatchSource:0}: Error finding container 11d3d4aa2b567c1c731b3ccda816a60f868e652fb2f984fbb12d75feb3b3ac41: Status 404 returned error can't find the container with id 11d3d4aa2b567c1c731b3ccda816a60f868e652fb2f984fbb12d75feb3b3ac41
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.666184 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-fstjd"]
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.693024 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-nfmtb"]
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.709929 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-nmpk2"]
Dec 05 01:28:05 crc kubenswrapper[4665]: E1205 01:28:05.719659 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kqdvh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-nmpk2_openstack-operators(c874c72f-9ac0-4ce5-bf5c-fc9e983b725c): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.722145 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dmtqm"]
Dec 05 01:28:05 crc kubenswrapper[4665]: E1205 01:28:05.726451 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kqdvh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-nmpk2_openstack-operators(c874c72f-9ac0-4ce5-bf5c-fc9e983b725c): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 05 01:28:05 crc kubenswrapper[4665]: E1205 01:28:05.728335 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-nmpk2" podUID="c874c72f-9ac0-4ce5-bf5c-fc9e983b725c"
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.734695 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-dlncm"]
Dec 05 01:28:05 crc kubenswrapper[4665]: W1205 01:28:05.759287 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5c1d9b52_7fcd_4615_9faa_af55e4165ffb.slice/crio-487f5c32795e5a8f81754c6a78921e1238a867ca93d2414768547d983fcf3265 WatchSource:0}: Error finding container 487f5c32795e5a8f81754c6a78921e1238a867ca93d2414768547d983fcf3265: Status 404 returned error can't find the container with id 487f5c32795e5a8f81754c6a78921e1238a867ca93d2414768547d983fcf3265
Dec 05 01:28:05 crc kubenswrapper[4665]: E1205 01:28:05.773199 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xlg56,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-bk8hv_openstack-operators(0c289bf9-999d-4396-b15b-b27fded35180): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 05 01:28:05 crc kubenswrapper[4665]: E1205 01:28:05.777100 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xlg56,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-bk8hv_openstack-operators(0c289bf9-999d-4396-b15b-b27fded35180): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 05 01:28:05 crc kubenswrapper[4665]: E1205 01:28:05.778214 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bxmnq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-dlncm_openstack-operators(3933c0fc-ca36-42a1-b418-9db281576617): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 05 01:28:05 crc kubenswrapper[4665]: E1205 01:28:05.778337 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-bk8hv" podUID="0c289bf9-999d-4396-b15b-b27fded35180"
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.799332 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-sbnx4"]
Dec 05 01:28:05 crc kubenswrapper[4665]: E1205 01:28:05.800620 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bxmnq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-dlncm_openstack-operators(3933c0fc-ca36-42a1-b418-9db281576617): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 05 01:28:05 crc kubenswrapper[4665]: E1205 01:28:05.805530 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-dlncm" podUID="3933c0fc-ca36-42a1-b418-9db281576617"
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.815440 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-6dh9j"]
Dec 05 01:28:05 crc kubenswrapper[4665]: W1205 01:28:05.825543 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf353bd63_e193_4a26_8ba6_32f1eec034a8.slice/crio-594639c49a997de14e30e47565610f0e65d985a00cb7757a434cef18902aef56 WatchSource:0}: Error finding container 594639c49a997de14e30e47565610f0e65d985a00cb7757a434cef18902aef56: Status 404 returned error can't find the container with id 594639c49a997de14e30e47565610f0e65d985a00cb7757a434cef18902aef56
Dec 05 01:28:05 crc kubenswrapper[4665]: E1205 01:28:05.831677 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gl8nz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-6dh9j_openstack-operators(f353bd63-e193-4a26-8ba6-32f1eec034a8): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.838458 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-bk8hv"]
Dec 05 01:28:05 crc kubenswrapper[4665]: E1205 01:28:05.842879 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mkqq6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-nfmtb_openstack-operators(5c1d9b52-7fcd-4615-9faa-af55e4165ffb): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 05 01:28:05 crc kubenswrapper[4665]: E1205 01:28:05.842995 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gl8nz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-6dh9j_openstack-operators(f353bd63-e193-4a26-8ba6-32f1eec034a8): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 05 01:28:05 crc kubenswrapper[4665]: E1205 01:28:05.845535 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-6dh9j" podUID="f353bd63-e193-4a26-8ba6-32f1eec034a8"
Dec 05 01:28:05 crc kubenswrapper[4665]: E1205 01:28:05.853871 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mkqq6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-nfmtb_openstack-operators(5c1d9b52-7fcd-4615-9faa-af55e4165ffb): ErrImagePull: pull QPS exceeded" logger="UnhandledError"
Dec 05 01:28:05 crc kubenswrapper[4665]: E1205 01:28:05.856018 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nfmtb" podUID="5c1d9b52-7fcd-4615-9faa-af55e4165ffb"
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.900165 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-webhook-certs\") pod \"openstack-operator-controller-manager-79966545b7-vd7h4\" (UID: \"afada5e1-db62-40f7-b5a9-1c36f42670d4\") " pod="openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4"
Dec 05 01:28:05 crc kubenswrapper[4665]: I1205 01:28:05.900237 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-metrics-certs\") pod \"openstack-operator-controller-manager-79966545b7-vd7h4\" (UID: \"afada5e1-db62-40f7-b5a9-1c36f42670d4\") " pod="openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4"
Dec 05 01:28:05 crc kubenswrapper[4665]: E1205 01:28:05.900502 4665 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 05 01:28:05 crc kubenswrapper[4665]: E1205 01:28:05.900664 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-metrics-certs podName:afada5e1-db62-40f7-b5a9-1c36f42670d4 nodeName:}" failed. No retries permitted until 2025-12-05 01:28:07.900597397 +0000 UTC m=+1063.239989736 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-metrics-certs") pod "openstack-operator-controller-manager-79966545b7-vd7h4" (UID: "afada5e1-db62-40f7-b5a9-1c36f42670d4") : secret "metrics-server-cert" not found
Dec 05 01:28:05 crc kubenswrapper[4665]: E1205 01:28:05.900728 4665 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 05 01:28:05 crc kubenswrapper[4665]: E1205 01:28:05.900765 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-webhook-certs podName:afada5e1-db62-40f7-b5a9-1c36f42670d4 nodeName:}" failed. No retries permitted until 2025-12-05 01:28:07.900755841 +0000 UTC m=+1063.240148150 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-webhook-certs") pod "openstack-operator-controller-manager-79966545b7-vd7h4" (UID: "afada5e1-db62-40f7-b5a9-1c36f42670d4") : secret "webhook-server-cert" not found
Dec 05 01:28:06 crc kubenswrapper[4665]: I1205 01:28:06.506118 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-dlncm" event={"ID":"3933c0fc-ca36-42a1-b418-9db281576617","Type":"ContainerStarted","Data":"2f53f70dbfa22befbcc701ab3dcb22a678749044d54a20f36e676b6c76ae3686"}
Dec 05 01:28:06 crc kubenswrapper[4665]: E1205 01:28:06.510160 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-dlncm" podUID="3933c0fc-ca36-42a1-b418-9db281576617"
Dec 05 01:28:06 crc kubenswrapper[4665]: I1205 01:28:06.516216 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-fstjd" event={"ID":"8543ed45-b6f4-4f54-bc94-756bf6f031e6","Type":"ContainerStarted","Data":"9e80cc3dbb2611b15ce762de90b03ff6b14bf3e7fb95599ec04d3197053289b6"}
Dec 05 01:28:06 crc kubenswrapper[4665]: I1205 01:28:06.527220 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dmtqm" event={"ID":"7bafbafa-4235-456c-a2b3-7990ad3f14e2","Type":"ContainerStarted","Data":"64c1d223fa0cea4aa891ecdd3706f398c0e3f4d8dfe5199e9396f9878f5bf878"}
Dec 05 01:28:06 crc kubenswrapper[4665]: I1205 01:28:06.530999 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-bk8hv" event={"ID":"0c289bf9-999d-4396-b15b-b27fded35180","Type":"ContainerStarted","Data":"5b176f430f6db45653e25e3f2b5590d112ce95bf408a931f33e2778665dbd85c"}
Dec 05 01:28:06 crc kubenswrapper[4665]: I1205 01:28:06.544686 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-sbnx4" event={"ID":"5009fa32-5a01-46dc-9238-2f3c8ef7fddc","Type":"ContainerStarted","Data":"c3a029edaab1949dbe4bd9bda640cc5b9192547a5e400e36303a9a304e98356c"}
Dec 05 01:28:06 crc kubenswrapper[4665]: E1205 01:28:06.545576 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-bk8hv" podUID="0c289bf9-999d-4396-b15b-b27fded35180"
Dec 05 01:28:06 crc kubenswrapper[4665]: I1205 01:28:06.546580 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-fbmtc" event={"ID":"8d94649e-ea57-4b1a-9fb0-2b37b567cd77","Type":"ContainerStarted","Data":"dc83e1b86d114c1de1e80f083612ec6b05bccab70a68b92fc7bccaa5dbb61ee1"}
event={"ID":"8d94649e-ea57-4b1a-9fb0-2b37b567cd77","Type":"ContainerStarted","Data":"dc83e1b86d114c1de1e80f083612ec6b05bccab70a68b92fc7bccaa5dbb61ee1"} Dec 05 01:28:06 crc kubenswrapper[4665]: I1205 01:28:06.547684 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-dnqxs" event={"ID":"764119ea-4228-4141-a7a7-faee0be8d052","Type":"ContainerStarted","Data":"11d3d4aa2b567c1c731b3ccda816a60f868e652fb2f984fbb12d75feb3b3ac41"} Dec 05 01:28:06 crc kubenswrapper[4665]: I1205 01:28:06.560090 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-ptwdk" event={"ID":"669a406d-6e51-4ead-89ff-4a1df7cb7308","Type":"ContainerStarted","Data":"6d74b7efa722a7e1163320badc5534c2eeffab87aa69378026f212622887e67a"} Dec 05 01:28:06 crc kubenswrapper[4665]: I1205 01:28:06.571187 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nfmtb" event={"ID":"5c1d9b52-7fcd-4615-9faa-af55e4165ffb","Type":"ContainerStarted","Data":"487f5c32795e5a8f81754c6a78921e1238a867ca93d2414768547d983fcf3265"} Dec 05 01:28:06 crc kubenswrapper[4665]: I1205 01:28:06.578875 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-nmpk2" event={"ID":"c874c72f-9ac0-4ce5-bf5c-fc9e983b725c","Type":"ContainerStarted","Data":"ef31f5c1c730935fc77ca94e60e2bc1b147503f163170859708cad7cf25c047d"} Dec 05 01:28:06 crc kubenswrapper[4665]: E1205 01:28:06.579407 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nfmtb" podUID="5c1d9b52-7fcd-4615-9faa-af55e4165ffb" Dec 05 01:28:06 crc kubenswrapper[4665]: E1205 01:28:06.584601 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-nmpk2" podUID="c874c72f-9ac0-4ce5-bf5c-fc9e983b725c" Dec 05 01:28:06 crc kubenswrapper[4665]: I1205 01:28:06.589132 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-fgjvz" event={"ID":"fb8b497b-5207-408c-9e30-e7169c4ccede","Type":"ContainerStarted","Data":"b84b326133131c8d7a38ae47c338866b16fb296525d2dc35bb574a688975740b"} Dec 05 01:28:06 crc kubenswrapper[4665]: I1205 01:28:06.598897 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-6dh9j" event={"ID":"f353bd63-e193-4a26-8ba6-32f1eec034a8","Type":"ContainerStarted","Data":"594639c49a997de14e30e47565610f0e65d985a00cb7757a434cef18902aef56"} Dec 05 01:28:06 crc 
kubenswrapper[4665]: E1205 01:28:06.609689 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-6dh9j" podUID="f353bd63-e193-4a26-8ba6-32f1eec034a8" Dec 05 01:28:06 crc kubenswrapper[4665]: I1205 01:28:06.819933 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/68e17c62-dc97-4bf6-b9de-340e03d5cbda-cert\") pod \"infra-operator-controller-manager-57548d458d-lt2sv\" (UID: \"68e17c62-dc97-4bf6-b9de-340e03d5cbda\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-lt2sv" Dec 05 01:28:06 crc kubenswrapper[4665]: E1205 01:28:06.820136 4665 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 01:28:06 crc kubenswrapper[4665]: E1205 01:28:06.820193 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/68e17c62-dc97-4bf6-b9de-340e03d5cbda-cert podName:68e17c62-dc97-4bf6-b9de-340e03d5cbda nodeName:}" failed. No retries permitted until 2025-12-05 01:28:10.820174628 +0000 UTC m=+1066.159566927 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/68e17c62-dc97-4bf6-b9de-340e03d5cbda-cert") pod "infra-operator-controller-manager-57548d458d-lt2sv" (UID: "68e17c62-dc97-4bf6-b9de-340e03d5cbda") : secret "infra-operator-webhook-server-cert" not found Dec 05 01:28:07 crc kubenswrapper[4665]: I1205 01:28:07.346571 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/77dd89d3-29be-4d9c-ad35-a6281d6bd57f-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9\" (UID: \"77dd89d3-29be-4d9c-ad35-a6281d6bd57f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9" Dec 05 01:28:07 crc kubenswrapper[4665]: E1205 01:28:07.346756 4665 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 01:28:07 crc kubenswrapper[4665]: E1205 01:28:07.346842 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/77dd89d3-29be-4d9c-ad35-a6281d6bd57f-cert podName:77dd89d3-29be-4d9c-ad35-a6281d6bd57f nodeName:}" failed. No retries permitted until 2025-12-05 01:28:11.346818432 +0000 UTC m=+1066.686210741 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/77dd89d3-29be-4d9c-ad35-a6281d6bd57f-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9" (UID: "77dd89d3-29be-4d9c-ad35-a6281d6bd57f") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 01:28:07 crc kubenswrapper[4665]: E1205 01:28:07.625769 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-bk8hv" podUID="0c289bf9-999d-4396-b15b-b27fded35180" Dec 05 01:28:07 crc kubenswrapper[4665]: E1205 01:28:07.627899 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-nmpk2" podUID="c874c72f-9ac0-4ce5-bf5c-fc9e983b725c" Dec 05 01:28:07 crc kubenswrapper[4665]: E1205 01:28:07.628011 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-6dh9j" podUID="f353bd63-e193-4a26-8ba6-32f1eec034a8" Dec 05 01:28:07 crc kubenswrapper[4665]: E1205 01:28:07.628061 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-dlncm" podUID="3933c0fc-ca36-42a1-b418-9db281576617" Dec 05 01:28:07 crc kubenswrapper[4665]: E1205 01:28:07.640771 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nfmtb" podUID="5c1d9b52-7fcd-4615-9faa-af55e4165ffb" Dec 05 01:28:07 crc 
Dec 05 01:28:07 crc kubenswrapper[4665]: I1205 01:28:07.983086 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-metrics-certs\") pod \"openstack-operator-controller-manager-79966545b7-vd7h4\" (UID: \"afada5e1-db62-40f7-b5a9-1c36f42670d4\") " pod="openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4"
Dec 05 01:28:07 crc kubenswrapper[4665]: E1205 01:28:07.985904 4665 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 05 01:28:07 crc kubenswrapper[4665]: E1205 01:28:07.986011 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-webhook-certs podName:afada5e1-db62-40f7-b5a9-1c36f42670d4 nodeName:}" failed. No retries permitted until 2025-12-05 01:28:11.985970729 +0000 UTC m=+1067.325363028 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-webhook-certs") pod "openstack-operator-controller-manager-79966545b7-vd7h4" (UID: "afada5e1-db62-40f7-b5a9-1c36f42670d4") : secret "webhook-server-cert" not found
Dec 05 01:28:07 crc kubenswrapper[4665]: E1205 01:28:07.986519 4665 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 05 01:28:07 crc kubenswrapper[4665]: E1205 01:28:07.986551 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-metrics-certs podName:afada5e1-db62-40f7-b5a9-1c36f42670d4 nodeName:}" failed. No retries permitted until 2025-12-05 01:28:11.986543274 +0000 UTC m=+1067.325935573 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-metrics-certs") pod "openstack-operator-controller-manager-79966545b7-vd7h4" (UID: "afada5e1-db62-40f7-b5a9-1c36f42670d4") : secret "metrics-server-cert" not found
Dec 05 01:28:10 crc kubenswrapper[4665]: I1205 01:28:10.832829 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/68e17c62-dc97-4bf6-b9de-340e03d5cbda-cert\") pod \"infra-operator-controller-manager-57548d458d-lt2sv\" (UID: \"68e17c62-dc97-4bf6-b9de-340e03d5cbda\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-lt2sv"
Dec 05 01:28:10 crc kubenswrapper[4665]: E1205 01:28:10.833040 4665 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found
Dec 05 01:28:10 crc kubenswrapper[4665]: E1205 01:28:10.833462 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/68e17c62-dc97-4bf6-b9de-340e03d5cbda-cert podName:68e17c62-dc97-4bf6-b9de-340e03d5cbda nodeName:}" failed. No retries permitted until 2025-12-05 01:28:18.833403216 +0000 UTC m=+1074.172795515 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/68e17c62-dc97-4bf6-b9de-340e03d5cbda-cert") pod "infra-operator-controller-manager-57548d458d-lt2sv" (UID: "68e17c62-dc97-4bf6-b9de-340e03d5cbda") : secret "infra-operator-webhook-server-cert" not found
Dec 05 01:28:11 crc kubenswrapper[4665]: I1205 01:28:11.440557 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/77dd89d3-29be-4d9c-ad35-a6281d6bd57f-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9\" (UID: \"77dd89d3-29be-4d9c-ad35-a6281d6bd57f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9"
Dec 05 01:28:11 crc kubenswrapper[4665]: E1205 01:28:11.440784 4665 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 01:28:11 crc kubenswrapper[4665]: E1205 01:28:11.440829 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/77dd89d3-29be-4d9c-ad35-a6281d6bd57f-cert podName:77dd89d3-29be-4d9c-ad35-a6281d6bd57f nodeName:}" failed. No retries permitted until 2025-12-05 01:28:19.440815102 +0000 UTC m=+1074.780207401 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/77dd89d3-29be-4d9c-ad35-a6281d6bd57f-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9" (UID: "77dd89d3-29be-4d9c-ad35-a6281d6bd57f") : secret "openstack-baremetal-operator-webhook-server-cert" not found
Dec 05 01:28:12 crc kubenswrapper[4665]: I1205 01:28:12.049646 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-webhook-certs\") pod \"openstack-operator-controller-manager-79966545b7-vd7h4\" (UID: \"afada5e1-db62-40f7-b5a9-1c36f42670d4\") " pod="openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4"
Dec 05 01:28:12 crc kubenswrapper[4665]: I1205 01:28:12.049763 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-metrics-certs\") pod \"openstack-operator-controller-manager-79966545b7-vd7h4\" (UID: \"afada5e1-db62-40f7-b5a9-1c36f42670d4\") " pod="openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4"
Dec 05 01:28:12 crc kubenswrapper[4665]: E1205 01:28:12.049922 4665 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found
Dec 05 01:28:12 crc kubenswrapper[4665]: E1205 01:28:12.049972 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-metrics-certs podName:afada5e1-db62-40f7-b5a9-1c36f42670d4 nodeName:}" failed. No retries permitted until 2025-12-05 01:28:20.049957589 +0000 UTC m=+1075.389349888 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-metrics-certs") pod "openstack-operator-controller-manager-79966545b7-vd7h4" (UID: "afada5e1-db62-40f7-b5a9-1c36f42670d4") : secret "metrics-server-cert" not found
Dec 05 01:28:12 crc kubenswrapper[4665]: E1205 01:28:12.050337 4665 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found
Dec 05 01:28:12 crc kubenswrapper[4665]: E1205 01:28:12.050365 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-webhook-certs podName:afada5e1-db62-40f7-b5a9-1c36f42670d4 nodeName:}" failed. No retries permitted until 2025-12-05 01:28:20.050357498 +0000 UTC m=+1075.389749797 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-webhook-certs") pod "openstack-operator-controller-manager-79966545b7-vd7h4" (UID: "afada5e1-db62-40f7-b5a9-1c36f42670d4") : secret "webhook-server-cert" not found
Dec 05 01:28:18 crc kubenswrapper[4665]: I1205 01:28:18.845905 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/68e17c62-dc97-4bf6-b9de-340e03d5cbda-cert\") pod \"infra-operator-controller-manager-57548d458d-lt2sv\" (UID: \"68e17c62-dc97-4bf6-b9de-340e03d5cbda\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-lt2sv"
Dec 05 01:28:18 crc kubenswrapper[4665]: I1205 01:28:18.859228 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/68e17c62-dc97-4bf6-b9de-340e03d5cbda-cert\") pod \"infra-operator-controller-manager-57548d458d-lt2sv\" (UID: \"68e17c62-dc97-4bf6-b9de-340e03d5cbda\") " pod="openstack-operators/infra-operator-controller-manager-57548d458d-lt2sv"
Dec 05 01:28:18 crc kubenswrapper[4665]: I1205 01:28:18.867781 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-57548d458d-lt2sv"
Dec 05 01:28:19 crc kubenswrapper[4665]: I1205 01:28:19.455663 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/77dd89d3-29be-4d9c-ad35-a6281d6bd57f-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9\" (UID: \"77dd89d3-29be-4d9c-ad35-a6281d6bd57f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9"
Dec 05 01:28:19 crc kubenswrapper[4665]: I1205 01:28:19.463351 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/77dd89d3-29be-4d9c-ad35-a6281d6bd57f-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9\" (UID: \"77dd89d3-29be-4d9c-ad35-a6281d6bd57f\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9"
Dec 05 01:28:19 crc kubenswrapper[4665]: I1205 01:28:19.476465 4665 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9" Dec 05 01:28:20 crc kubenswrapper[4665]: I1205 01:28:20.063070 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-webhook-certs\") pod \"openstack-operator-controller-manager-79966545b7-vd7h4\" (UID: \"afada5e1-db62-40f7-b5a9-1c36f42670d4\") " pod="openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4" Dec 05 01:28:20 crc kubenswrapper[4665]: I1205 01:28:20.063138 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-metrics-certs\") pod \"openstack-operator-controller-manager-79966545b7-vd7h4\" (UID: \"afada5e1-db62-40f7-b5a9-1c36f42670d4\") " pod="openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4" Dec 05 01:28:20 crc kubenswrapper[4665]: I1205 01:28:20.068804 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-webhook-certs\") pod \"openstack-operator-controller-manager-79966545b7-vd7h4\" (UID: \"afada5e1-db62-40f7-b5a9-1c36f42670d4\") " pod="openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4" Dec 05 01:28:20 crc kubenswrapper[4665]: I1205 01:28:20.076022 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/afada5e1-db62-40f7-b5a9-1c36f42670d4-metrics-certs\") pod \"openstack-operator-controller-manager-79966545b7-vd7h4\" (UID: \"afada5e1-db62-40f7-b5a9-1c36f42670d4\") " pod="openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4" Dec 05 01:28:20 crc kubenswrapper[4665]: I1205 01:28:20.280723 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4" Dec 05 01:28:20 crc kubenswrapper[4665]: E1205 01:28:20.882950 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea" Dec 05 01:28:20 crc kubenswrapper[4665]: E1205 01:28:20.883422 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/barbican-operator@sha256:f6059a0fbf031d34dcf086d14ce8c0546caeaee23c5780e90b5037c5feee9fea,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qsdzr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-7d9dfd778-8hblt_openstack-operators(fa7a4766-c028-4114-b979-a7900e21103c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:28:27 crc kubenswrapper[4665]: E1205 01:28:27.093102 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621" Dec 05 01:28:27 crc kubenswrapper[4665]: E1205 01:28:27.094175 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/watcher-operator@sha256:9aa8c03633e4b934c57868c1660acf47e7d386ac86bcb344df262c9ad76b8621,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xj59t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod watcher-operator-controller-manager-769dc69bc-sbnx4_openstack-operators(5009fa32-5a01-46dc-9238-2f3c8ef7fddc): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:28:27 crc kubenswrapper[4665]: E1205 01:28:27.778752 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/manila-operator@sha256:2e59cfbeefc3aff0bb0a6ae9ce2235129f5173c98dd5ee8dac229ad4895faea9" Dec 05 01:28:27 crc kubenswrapper[4665]: E1205 01:28:27.779049 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/manila-operator@sha256:2e59cfbeefc3aff0bb0a6ae9ce2235129f5173c98dd5ee8dac229ad4895faea9,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 
-3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-t6pgn,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod manila-operator-controller-manager-7c79b5df47-dxk89_openstack-operators(1fd940e7-8adc-4859-a763-58d909016fd6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:28:31 crc kubenswrapper[4665]: E1205 01:28:31.858042 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168" Dec 05 01:28:31 crc kubenswrapper[4665]: E1205 01:28:31.858478 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-hnvfm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-dnqxs_openstack-operators(764119ea-4228-4141-a7a7-faee0be8d052): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:28:34 crc kubenswrapper[4665]: E1205 01:28:34.000225 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557" Dec 05 01:28:34 crc kubenswrapper[4665]: E1205 01:28:34.003366 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/neutron-operator@sha256:0b3fb69f35c151895d3dffd514974a9f9fe1c77c3bca69b78b81efb183cf4557,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-bt46x,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod neutron-operator-controller-manager-5fdfd5b6b5-fbmtc_openstack-operators(8d94649e-ea57-4b1a-9fb0-2b37b567cd77): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:28:35 crc kubenswrapper[4665]: E1205 01:28:35.574095 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7" Dec 05 01:28:35 crc kubenswrapper[4665]: E1205 01:28:35.574688 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-ftzbm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-56bbcc9d85-5qmj8_openstack-operators(e7235695-da27-4f1d-afec-a6f2a3decc79): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:28:36 crc kubenswrapper[4665]: E1205 01:28:36.481223 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/cinder-operator@sha256:1d60701214b39cdb0fa70bbe5710f9b131139a9f4b482c2db4058a04daefb801" Dec 05 01:28:36 crc kubenswrapper[4665]: E1205 01:28:36.481495 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/cinder-operator@sha256:1d60701214b39cdb0fa70bbe5710f9b131139a9f4b482c2db4058a04daefb801,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-f69np,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-859b6ccc6-ptwdk_openstack-operators(669a406d-6e51-4ead-89ff-4a1df7cb7308): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:28:39 crc kubenswrapper[4665]: E1205 01:28:39.920217 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429" Dec 05 01:28:39 crc kubenswrapper[4665]: E1205 01:28:39.921053 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/heat-operator@sha256:c4abfc148600dfa85915f3dc911d988ea2335f26cb6b8d749fe79bfe53e5e429,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-fcf9h,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod heat-operator-controller-manager-5f64f6f8bb-dz2g4_openstack-operators(93b5a195-1f87-4eb3-8511-916e652b3913): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:28:41 crc kubenswrapper[4665]: E1205 01:28:41.556316 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385" Dec 05 01:28:41 crc kubenswrapper[4665]: E1205 01:28:41.556543 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-gl8nz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-6dh9j_openstack-operators(f353bd63-e193-4a26-8ba6-32f1eec034a8): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:28:41 crc kubenswrapper[4665]: E1205 01:28:41.970117 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59" Dec 05 01:28:41 crc kubenswrapper[4665]: E1205 01:28:41.970577 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-kqdvh,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-nmpk2_openstack-operators(c874c72f-9ac0-4ce5-bf5c-fc9e983b725c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:28:42 crc kubenswrapper[4665]: E1205 01:28:42.452332 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94" Dec 05 01:28:42 crc kubenswrapper[4665]: E1205 01:28:42.452543 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xlg56,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-bk8hv_openstack-operators(0c289bf9-999d-4396-b15b-b27fded35180): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:28:44 crc kubenswrapper[4665]: E1205 01:28:44.307190 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670" Dec 05 01:28:44 crc kubenswrapper[4665]: E1205 01:28:44.307399 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-l9hgk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-fgjvz_openstack-operators(fb8b497b-5207-408c-9e30-e7169c4ccede): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:28:44 crc kubenswrapper[4665]: E1205 01:28:44.640986 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2" Dec 05 01:28:44 crc kubenswrapper[4665]: E1205 01:28:44.641195 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-5xl4r,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-dmtqm_openstack-operators(7bafbafa-4235-456c-a2b3-7990ad3f14e2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" 
logger="UnhandledError" Dec 05 01:28:44 crc kubenswrapper[4665]: E1205 01:28:44.642378 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dmtqm" podUID="7bafbafa-4235-456c-a2b3-7990ad3f14e2" Dec 05 01:28:44 crc kubenswrapper[4665]: E1205 01:28:44.873133 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dmtqm" podUID="7bafbafa-4235-456c-a2b3-7990ad3f14e2" Dec 05 01:28:45 crc kubenswrapper[4665]: E1205 01:28:45.335511 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7" Dec 05 01:28:45 crc kubenswrapper[4665]: E1205 01:28:45.335982 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-mkqq6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-nfmtb_openstack-operators(5c1d9b52-7fcd-4615-9faa-af55e4165ffb): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:28:45 crc kubenswrapper[4665]: I1205 01:28:45.754402 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4"] Dec 05 01:28:45 crc kubenswrapper[4665]: I1205 01:28:45.801626 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-57548d458d-lt2sv"] Dec 05 01:28:45 crc kubenswrapper[4665]: I1205 01:28:45.867330 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9"] Dec 05 01:28:48 crc kubenswrapper[4665]: W1205 01:28:48.872492 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podafada5e1_db62_40f7_b5a9_1c36f42670d4.slice/crio-853813c2d6a623c1a9b95e7499a30f360d020795e61f8e84322df89679a8eea5 WatchSource:0}: Error finding container 853813c2d6a623c1a9b95e7499a30f360d020795e61f8e84322df89679a8eea5: Status 404 returned error can't find the container with id 853813c2d6a623c1a9b95e7499a30f360d020795e61f8e84322df89679a8eea5 Dec 05 01:28:48 crc kubenswrapper[4665]: W1205 01:28:48.877640 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod68e17c62_dc97_4bf6_b9de_340e03d5cbda.slice/crio-59d84e9acc221e10fc6b8e1294930b4e89a006265002bd473fc75f8d4225be19 WatchSource:0}: Error finding container 59d84e9acc221e10fc6b8e1294930b4e89a006265002bd473fc75f8d4225be19: Status 404 returned error can't find the container with id 59d84e9acc221e10fc6b8e1294930b4e89a006265002bd473fc75f8d4225be19 Dec 05 01:28:48 crc kubenswrapper[4665]: I1205 01:28:48.908824 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-lt2sv" event={"ID":"68e17c62-dc97-4bf6-b9de-340e03d5cbda","Type":"ContainerStarted","Data":"59d84e9acc221e10fc6b8e1294930b4e89a006265002bd473fc75f8d4225be19"} Dec 05 01:28:48 crc kubenswrapper[4665]: I1205 01:28:48.909125 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9" event={"ID":"77dd89d3-29be-4d9c-ad35-a6281d6bd57f","Type":"ContainerStarted","Data":"178399e960a1767b9c55b94165575b07d7cad6e24797a5cfac9f1d068b3f0a25"} Dec 05 01:28:48 crc kubenswrapper[4665]: I1205 01:28:48.909215 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4" event={"ID":"afada5e1-db62-40f7-b5a9-1c36f42670d4","Type":"ContainerStarted","Data":"853813c2d6a623c1a9b95e7499a30f360d020795e61f8e84322df89679a8eea5"} Dec 05 01:28:49 crc kubenswrapper[4665]: I1205 01:28:49.919531 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-dlncm" event={"ID":"3933c0fc-ca36-42a1-b418-9db281576617","Type":"ContainerStarted","Data":"85094bf4ed26610d5dcabd31b593b410d1be9856bf86c53e6644e26a06c1f051"} Dec 05 01:28:49 crc kubenswrapper[4665]: I1205 01:28:49.924429 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-6bsqf" event={"ID":"c8e9557a-3433-413b-a5b2-9137f2b9c584","Type":"ContainerStarted","Data":"e3b63ce3b9aa7055103f29632e82747f73234a19273f27a71555ed86cf478e5e"} Dec 05 01:28:49 crc kubenswrapper[4665]: I1205 01:28:49.930362 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-fstjd" event={"ID":"8543ed45-b6f4-4f54-bc94-756bf6f031e6","Type":"ContainerStarted","Data":"cfdce61f8aa4e12a7036266674ada84a435812715d85b93d058d4a6775fed9b9"} Dec 05 01:28:49 crc kubenswrapper[4665]: I1205 01:28:49.936696 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-9pbl9" event={"ID":"3a85872e-618d-4847-aae0-1eb366f16003","Type":"ContainerStarted","Data":"ba3f6fa596173ea2c8940f043bfa83a5598711765409e40f6559199f045695da"} Dec 05 01:28:49 crc kubenswrapper[4665]: I1205 01:28:49.937946 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q6zxf" event={"ID":"8bd45d32-e93b-415b-a885-3926454418c9","Type":"ContainerStarted","Data":"af2d899066f70b2e66c23329c8ecbbbfe082317ed26b8daba7bdd7ee4251cf61"} Dec 05 01:28:49 crc kubenswrapper[4665]: I1205 01:28:49.939424 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-ks65z" event={"ID":"277c6945-0cbb-4a0a-8e22-e990d76da759","Type":"ContainerStarted","Data":"962d029c97e2950473a78831385ee479d7a98debe6ab51c7601ace2fc0ebc80a"} Dec 05 01:28:51 crc kubenswrapper[4665]: I1205 01:28:51.956950 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4" event={"ID":"afada5e1-db62-40f7-b5a9-1c36f42670d4","Type":"ContainerStarted","Data":"79405e28b671a5afe101cfdf39bee99a16b20a0b67aaafb4c1afb11820d2b13e"} Dec 05 01:28:52 crc kubenswrapper[4665]: I1205 01:28:52.963277 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4" Dec 05 01:28:53 crc kubenswrapper[4665]: I1205 01:28:53.012235 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4" podStartSLOduration=50.012213529 podStartE2EDuration="50.012213529s" podCreationTimestamp="2025-12-05 01:28:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:28:53.009546745 +0000 UTC m=+1108.348939064" watchObservedRunningTime="2025-12-05 01:28:53.012213529 +0000 UTC m=+1108.351605828" Dec 05 01:28:54 crc kubenswrapper[4665]: E1205 
01:28:54.122334 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0" Dec 05 01:28:54 crc kubenswrapper[4665]: E1205 01:28:54.122770 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qsdzr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-operator-controller-manager-7d9dfd778-8hblt_openstack-operators(fa7a4766-c028-4114-b979-a7900e21103c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:28:54 crc kubenswrapper[4665]: E1205 01:28:54.124943 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"]" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-8hblt" podUID="fa7a4766-c028-4114-b979-a7900e21103c" Dec 05 01:28:55 crc kubenswrapper[4665]: E1205 01:28:55.375011 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/octavia-operator-controller-manager-998648c74-dnqxs" podUID="764119ea-4228-4141-a7a7-faee0be8d052" Dec 05 01:28:55 crc kubenswrapper[4665]: E1205 01:28:55.395947 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-nmpk2" podUID="c874c72f-9ac0-4ce5-bf5c-fc9e983b725c" Dec 05 01:28:55 crc kubenswrapper[4665]: E1205 01:28:55.404812 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5qmj8" podUID="e7235695-da27-4f1d-afec-a6f2a3decc79" Dec 05 01:28:55 crc kubenswrapper[4665]: E1205 01:28:55.502741 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-ptwdk" podUID="669a406d-6e51-4ead-89ff-4a1df7cb7308" Dec 05 01:28:55 crc kubenswrapper[4665]: E1205 01:28:55.612575 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/test-operator-controller-manager-5854674fcc-bk8hv" podUID="0c289bf9-999d-4396-b15b-b27fded35180" Dec 05 01:28:55 crc kubenswrapper[4665]: E1205 01:28:55.619645 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-sbnx4" podUID="5009fa32-5a01-46dc-9238-2f3c8ef7fddc" Dec 05 01:28:55 crc kubenswrapper[4665]: E1205 01:28:55.628190 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-dxk89" podUID="1fd940e7-8adc-4859-a763-58d909016fd6" Dec 05 01:28:55 crc kubenswrapper[4665]: E1205 01:28:55.628243 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nfmtb" podUID="5c1d9b52-7fcd-4615-9faa-af55e4165ffb" Dec 05 01:28:55 crc kubenswrapper[4665]: E1205 01:28:55.628332 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-fbmtc" podUID="8d94649e-ea57-4b1a-9fb0-2b37b567cd77" Dec 05 01:28:55 crc kubenswrapper[4665]: E1205 01:28:55.818678 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-fgjvz" podUID="fb8b497b-5207-408c-9e30-e7169c4ccede" Dec 05 01:28:55 crc kubenswrapper[4665]: E1205 01:28:55.829747 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-6dh9j" podUID="f353bd63-e193-4a26-8ba6-32f1eec034a8" Dec 05 01:28:55 crc kubenswrapper[4665]: E1205 01:28:55.884781 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dz2g4" 
podUID="93b5a195-1f87-4eb3-8511-916e652b3913" Dec 05 01:28:55 crc kubenswrapper[4665]: I1205 01:28:55.985307 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-sbnx4" event={"ID":"5009fa32-5a01-46dc-9238-2f3c8ef7fddc","Type":"ContainerStarted","Data":"19fd136a8879cd5ffd1d4a5ac2e8a28706b894e1d1ba8c3c10e034e1deb14a96"} Dec 05 01:28:55 crc kubenswrapper[4665]: I1205 01:28:55.989416 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-dnqxs" event={"ID":"764119ea-4228-4141-a7a7-faee0be8d052","Type":"ContainerStarted","Data":"492c1c4f7ad278bf7e8cc1ce919838d73877b1a703fa515cabdc394849a4cfa5"} Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:55.999443 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-dlncm" event={"ID":"3933c0fc-ca36-42a1-b418-9db281576617","Type":"ContainerStarted","Data":"0948cb3cbfcd1b4e92abbdb0cab9168608c78184420121cdb6f4a490854d4fea"} Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.000356 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-dlncm" Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.004740 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-dlncm" Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.028418 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-lt2sv" event={"ID":"68e17c62-dc97-4bf6-b9de-340e03d5cbda","Type":"ContainerStarted","Data":"564216b1534cb6ba66769c7be89125d6ead27282cc40d1ce89030660766d2e3f"} Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.028458 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-57548d458d-lt2sv" event={"ID":"68e17c62-dc97-4bf6-b9de-340e03d5cbda","Type":"ContainerStarted","Data":"db868b6a99d6977d3977ad2d4c64d7abaa0030eaec88ee0aec74413366462175"} Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.029054 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-57548d458d-lt2sv" Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.042318 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nfmtb" event={"ID":"5c1d9b52-7fcd-4615-9faa-af55e4165ffb","Type":"ContainerStarted","Data":"6f6c494ad87c556ab329b4e81e81d877bc504f0f5521c0580ae3a13ec1cb7b87"} Dec 05 01:28:56 crc kubenswrapper[4665]: E1205 01:28:56.043770 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7\\\"\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nfmtb" podUID="5c1d9b52-7fcd-4615-9faa-af55e4165ffb" Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.057793 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-bk8hv" 
event={"ID":"0c289bf9-999d-4396-b15b-b27fded35180","Type":"ContainerStarted","Data":"6d32f9ab7fd8dd908cd9362036c2da9a17c8f30ccecafd0b46107e0f4f98d9e1"} Dec 05 01:28:56 crc kubenswrapper[4665]: E1205 01:28:56.063992 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\"" pod="openstack-operators/test-operator-controller-manager-5854674fcc-bk8hv" podUID="0c289bf9-999d-4396-b15b-b27fded35180" Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.080608 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-fgjvz" event={"ID":"fb8b497b-5207-408c-9e30-e7169c4ccede","Type":"ContainerStarted","Data":"d1855ad8cda8726655047dfb54e0dfa815ac18a41af7d64dadad8f33571bf4e0"} Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.080939 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-dlncm" podStartSLOduration=3.960015926 podStartE2EDuration="53.080922645s" podCreationTimestamp="2025-12-05 01:28:03 +0000 UTC" firstStartedPulling="2025-12-05 01:28:05.77801952 +0000 UTC m=+1061.117411819" lastFinishedPulling="2025-12-05 01:28:54.898926239 +0000 UTC m=+1110.238318538" observedRunningTime="2025-12-05 01:28:56.070689939 +0000 UTC m=+1111.410082238" watchObservedRunningTime="2025-12-05 01:28:56.080922645 +0000 UTC m=+1111.420314944" Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.100771 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dz2g4" event={"ID":"93b5a195-1f87-4eb3-8511-916e652b3913","Type":"ContainerStarted","Data":"89b8032fc99f048a842c13efb7e9dbfb622feffc9c5fb97fdbf279bd3b2b9ed2"} Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.150457 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-fstjd" event={"ID":"8543ed45-b6f4-4f54-bc94-756bf6f031e6","Type":"ContainerStarted","Data":"148b7408e6056b9f2e8ce741aff28d33c564afbec400efd25615f2c8e154c93e"} Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.151486 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-fstjd" Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.153490 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-57548d458d-lt2sv" podStartSLOduration=48.307126891 podStartE2EDuration="54.153472381s" podCreationTimestamp="2025-12-05 01:28:02 +0000 UTC" firstStartedPulling="2025-12-05 01:28:48.879923375 +0000 UTC m=+1104.219315674" lastFinishedPulling="2025-12-05 01:28:54.726268865 +0000 UTC m=+1110.065661164" observedRunningTime="2025-12-05 01:28:56.151703919 +0000 UTC m=+1111.491096218" watchObservedRunningTime="2025-12-05 01:28:56.153472381 +0000 UTC m=+1111.492864680" Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.157467 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/placement-operator-controller-manager-78f8948974-fstjd" Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.177937 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-6bsqf" event={"ID":"c8e9557a-3433-413b-a5b2-9137f2b9c584","Type":"ContainerStarted","Data":"f019a095440e1ff26321b4b7afdf1190e588270716ca3a9573f567749642858a"} Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.178615 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-6bsqf" Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.185536 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-6bsqf" Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.190725 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9" event={"ID":"77dd89d3-29be-4d9c-ad35-a6281d6bd57f","Type":"ContainerStarted","Data":"f0998f9a6e207682159362bb3b22fe941056ee92add6d13a9c458184037505a2"} Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.190753 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9" event={"ID":"77dd89d3-29be-4d9c-ad35-a6281d6bd57f","Type":"ContainerStarted","Data":"dbae2c906845474e385ac716e779cd324f84b3b9f09cd57cc54f228e9a6176fe"} Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.191244 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9" Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.192261 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-9pbl9" event={"ID":"3a85872e-618d-4847-aae0-1eb366f16003","Type":"ContainerStarted","Data":"4ae44827392680bfd65ed438855bce8a567b7e9110c8de25075a9ff385d89ea0"} Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.192805 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-9pbl9" Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.202546 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-dxk89" event={"ID":"1fd940e7-8adc-4859-a763-58d909016fd6","Type":"ContainerStarted","Data":"0530687e5cd46538d0ff245a6fd063dc1da111afdc08b5897c7fab65ac53411d"} Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.208038 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-9pbl9" Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.219062 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-6dh9j" event={"ID":"f353bd63-e193-4a26-8ba6-32f1eec034a8","Type":"ContainerStarted","Data":"76536840cafe6381a861ddf6d61acf44ce01b266310deb5d31c10c45f5cf0a7e"} Dec 05 01:28:56 crc kubenswrapper[4665]: E1205 01:28:56.220620 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\"" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-6dh9j" 
podUID="f353bd63-e193-4a26-8ba6-32f1eec034a8" Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.228847 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5qmj8" event={"ID":"e7235695-da27-4f1d-afec-a6f2a3decc79","Type":"ContainerStarted","Data":"c0755f8774f271228ed01fb8021dd58b9139d882d8903a893ee85716ce0c5397"} Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.231741 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-fbmtc" event={"ID":"8d94649e-ea57-4b1a-9fb0-2b37b567cd77","Type":"ContainerStarted","Data":"7574d31a9b4cef890c1ad45ff1809b13893e146d21d327a6922650626c04db30"} Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.234802 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-9pbl9" podStartSLOduration=4.428639849 podStartE2EDuration="54.234782078s" podCreationTimestamp="2025-12-05 01:28:02 +0000 UTC" firstStartedPulling="2025-12-05 01:28:05.06313522 +0000 UTC m=+1060.402527519" lastFinishedPulling="2025-12-05 01:28:54.869277449 +0000 UTC m=+1110.208669748" observedRunningTime="2025-12-05 01:28:56.230480515 +0000 UTC m=+1111.569872804" watchObservedRunningTime="2025-12-05 01:28:56.234782078 +0000 UTC m=+1111.574174377" Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.247428 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q6zxf" event={"ID":"8bd45d32-e93b-415b-a885-3926454418c9","Type":"ContainerStarted","Data":"2934e592d40587f87a62e57dac488ac4767c1677130f08ef7805a5daca2ad4a2"} Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.248337 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q6zxf" Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.255001 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q6zxf" Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.256919 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-6bsqf" podStartSLOduration=4.048083818 podStartE2EDuration="54.256906118s" podCreationTimestamp="2025-12-05 01:28:02 +0000 UTC" firstStartedPulling="2025-12-05 01:28:04.780272639 +0000 UTC m=+1060.119664938" lastFinishedPulling="2025-12-05 01:28:54.989094939 +0000 UTC m=+1110.328487238" observedRunningTime="2025-12-05 01:28:56.2523968 +0000 UTC m=+1111.591789089" watchObservedRunningTime="2025-12-05 01:28:56.256906118 +0000 UTC m=+1111.596298417" Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.274643 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-ks65z" event={"ID":"277c6945-0cbb-4a0a-8e22-e990d76da759","Type":"ContainerStarted","Data":"81b99587026031e88d39e14a8f7276db8927b6ee05db385c20ea0e25f9679be2"} Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.275575 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-ks65z" Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.277400 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-ks65z" Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.282787 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-ptwdk" event={"ID":"669a406d-6e51-4ead-89ff-4a1df7cb7308","Type":"ContainerStarted","Data":"5a507ae850d573ccdce2111d453a85b086620997a5648b390972ddfad348bd40"} Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.286209 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-nmpk2" event={"ID":"c874c72f-9ac0-4ce5-bf5c-fc9e983b725c","Type":"ContainerStarted","Data":"5eb79a1a3219d7f3e9844bddb5a3b4eb3428b6549f2cee1209ad0405b6dd05a7"} Dec 05 01:28:56 crc kubenswrapper[4665]: E1205 01:28:56.287376 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59\\\"\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-nmpk2" podUID="c874c72f-9ac0-4ce5-bf5c-fc9e983b725c" Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.293976 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-fstjd" podStartSLOduration=4.098886923 podStartE2EDuration="53.293959916s" podCreationTimestamp="2025-12-05 01:28:03 +0000 UTC" firstStartedPulling="2025-12-05 01:28:05.703468827 +0000 UTC m=+1061.042861126" lastFinishedPulling="2025-12-05 01:28:54.89854182 +0000 UTC m=+1110.237934119" observedRunningTime="2025-12-05 01:28:56.287521781 +0000 UTC m=+1111.626914080" watchObservedRunningTime="2025-12-05 01:28:56.293959916 +0000 UTC m=+1111.633352215" Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.372522 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9" podStartSLOduration=47.534781043 podStartE2EDuration="53.372502357s" podCreationTimestamp="2025-12-05 01:28:03 +0000 UTC" firstStartedPulling="2025-12-05 01:28:48.888590792 +0000 UTC m=+1104.227983101" lastFinishedPulling="2025-12-05 01:28:54.726312116 +0000 UTC m=+1110.065704415" observedRunningTime="2025-12-05 01:28:56.359088705 +0000 UTC m=+1111.698481004" watchObservedRunningTime="2025-12-05 01:28:56.372502357 +0000 UTC m=+1111.711894656" Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.599175 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-ks65z" podStartSLOduration=4.478983855 podStartE2EDuration="54.599160365s" podCreationTimestamp="2025-12-05 01:28:02 +0000 UTC" firstStartedPulling="2025-12-05 01:28:04.742511861 +0000 UTC m=+1060.081904160" lastFinishedPulling="2025-12-05 01:28:54.862688371 +0000 UTC m=+1110.202080670" observedRunningTime="2025-12-05 01:28:56.598833816 +0000 UTC m=+1111.938226115" watchObservedRunningTime="2025-12-05 01:28:56.599160365 +0000 UTC m=+1111.938552664" Dec 05 01:28:56 crc kubenswrapper[4665]: I1205 01:28:56.677744 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-q6zxf" podStartSLOduration=5.152218423 podStartE2EDuration="54.677724236s" podCreationTimestamp="2025-12-05 01:28:02 
+0000 UTC" firstStartedPulling="2025-12-05 01:28:05.415484913 +0000 UTC m=+1060.754877212" lastFinishedPulling="2025-12-05 01:28:54.940990726 +0000 UTC m=+1110.280383025" observedRunningTime="2025-12-05 01:28:56.631580961 +0000 UTC m=+1111.970973260" watchObservedRunningTime="2025-12-05 01:28:56.677724236 +0000 UTC m=+1112.017116535" Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.300520 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-dxk89" event={"ID":"1fd940e7-8adc-4859-a763-58d909016fd6","Type":"ContainerStarted","Data":"2469199e5da6fb962864a0c76f2f3ffe6abd46c2f29e2245c1a310a92026155c"} Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.300828 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-dxk89" Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.303982 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-sbnx4" event={"ID":"5009fa32-5a01-46dc-9238-2f3c8ef7fddc","Type":"ContainerStarted","Data":"a148ce0fcd16fc0e0cd0e9800d2404ab61f88ed129f8f1f6d352f1b22b489506"} Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.304145 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-sbnx4" Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.306208 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-dnqxs" event={"ID":"764119ea-4228-4141-a7a7-faee0be8d052","Type":"ContainerStarted","Data":"169cb8c9fa3e1bc5807618ab019680b16379ef2433a463934d79ccffae41f4cf"} Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.306441 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-dnqxs" Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.307705 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dmtqm" event={"ID":"7bafbafa-4235-456c-a2b3-7990ad3f14e2","Type":"ContainerStarted","Data":"351680a5c8ac13f6e9f4897fbd8d491ce64178e1256c08951e22cc5aba571d28"} Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.309915 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-ptwdk" event={"ID":"669a406d-6e51-4ead-89ff-4a1df7cb7308","Type":"ContainerStarted","Data":"b5160e67ca78a3a50f0ba94d349f1306f063f9ce58e690ea6fa3352b06fae43b"} Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.310246 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-ptwdk" Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.311715 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-8hblt" event={"ID":"fa7a4766-c028-4114-b979-a7900e21103c","Type":"ContainerStarted","Data":"9465ab6b9fe14b498f4fab6a485d41eaf8091eb27c59b51ae7548c555ef04476"} Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.311746 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-8hblt" 
event={"ID":"fa7a4766-c028-4114-b979-a7900e21103c","Type":"ContainerStarted","Data":"697729ddae2c4494a6cd615ca1c809eb70c4ef6f1e8718cbad0cba50a04152cf"} Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.311910 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-8hblt" Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.313390 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-fgjvz" event={"ID":"fb8b497b-5207-408c-9e30-e7169c4ccede","Type":"ContainerStarted","Data":"8133c6824122b32ef557bd6a1d60a3fd31710b8f57859ffd22be231f268fddb0"} Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.313531 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-fgjvz" Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.314874 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5qmj8" event={"ID":"e7235695-da27-4f1d-afec-a6f2a3decc79","Type":"ContainerStarted","Data":"98e60aaf715bb0d2a6015326912976a7b93be64eff2d84efc91da6e0586c4500"} Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.314988 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5qmj8" Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.316205 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-fbmtc" event={"ID":"8d94649e-ea57-4b1a-9fb0-2b37b567cd77","Type":"ContainerStarted","Data":"2007c3308b5686eba6505c9a090439f6d4354c4dbf1b7bef5d00ac621374defb"} Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.316415 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-fbmtc" Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.317679 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dz2g4" event={"ID":"93b5a195-1f87-4eb3-8511-916e652b3913","Type":"ContainerStarted","Data":"f18c241fcd892236c5177b09b20232b7b40d02926479fdb1e3434def0ad5de74"} Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.334207 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-dxk89" podStartSLOduration=3.883338332 podStartE2EDuration="56.334172742s" podCreationTimestamp="2025-12-05 01:28:02 +0000 UTC" firstStartedPulling="2025-12-05 01:28:05.272171346 +0000 UTC m=+1060.611563635" lastFinishedPulling="2025-12-05 01:28:57.723005756 +0000 UTC m=+1113.062398045" observedRunningTime="2025-12-05 01:28:58.32988846 +0000 UTC m=+1113.669280759" watchObservedRunningTime="2025-12-05 01:28:58.334172742 +0000 UTC m=+1113.673565041" Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.353050 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-dmtqm" podStartSLOduration=2.551208436 podStartE2EDuration="54.353031253s" podCreationTimestamp="2025-12-05 01:28:04 +0000 UTC" firstStartedPulling="2025-12-05 01:28:05.706324885 +0000 UTC m=+1061.045717184" lastFinishedPulling="2025-12-05 01:28:57.508147712 +0000 UTC m=+1112.847540001" 
observedRunningTime="2025-12-05 01:28:58.350020981 +0000 UTC m=+1113.689413280" watchObservedRunningTime="2025-12-05 01:28:58.353031253 +0000 UTC m=+1113.692423552" Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.423610 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-8hblt" podStartSLOduration=4.522604354 podStartE2EDuration="56.423595174s" podCreationTimestamp="2025-12-05 01:28:02 +0000 UTC" firstStartedPulling="2025-12-05 01:28:05.415129263 +0000 UTC m=+1060.754521562" lastFinishedPulling="2025-12-05 01:28:57.316120083 +0000 UTC m=+1112.655512382" observedRunningTime="2025-12-05 01:28:58.419924615 +0000 UTC m=+1113.759316914" watchObservedRunningTime="2025-12-05 01:28:58.423595174 +0000 UTC m=+1113.762987473" Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.424795 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-dnqxs" podStartSLOduration=3.224734213 podStartE2EDuration="55.424789222s" podCreationTimestamp="2025-12-05 01:28:03 +0000 UTC" firstStartedPulling="2025-12-05 01:28:05.641722742 +0000 UTC m=+1060.981115041" lastFinishedPulling="2025-12-05 01:28:57.841777751 +0000 UTC m=+1113.181170050" observedRunningTime="2025-12-05 01:28:58.38503538 +0000 UTC m=+1113.724427669" watchObservedRunningTime="2025-12-05 01:28:58.424789222 +0000 UTC m=+1113.764181521" Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.499035 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dz2g4" podStartSLOduration=4.165104416 podStartE2EDuration="56.49901674s" podCreationTimestamp="2025-12-05 01:28:02 +0000 UTC" firstStartedPulling="2025-12-05 01:28:05.328927261 +0000 UTC m=+1060.668319560" lastFinishedPulling="2025-12-05 01:28:57.662839585 +0000 UTC m=+1113.002231884" observedRunningTime="2025-12-05 01:28:58.471162753 +0000 UTC m=+1113.810555052" watchObservedRunningTime="2025-12-05 01:28:58.49901674 +0000 UTC m=+1113.838409039" Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.670648 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-fbmtc" podStartSLOduration=3.867133174 podStartE2EDuration="55.670631109s" podCreationTimestamp="2025-12-05 01:28:03 +0000 UTC" firstStartedPulling="2025-12-05 01:28:05.703937329 +0000 UTC m=+1061.043329618" lastFinishedPulling="2025-12-05 01:28:57.507435254 +0000 UTC m=+1112.846827553" observedRunningTime="2025-12-05 01:28:58.581174767 +0000 UTC m=+1113.920567066" watchObservedRunningTime="2025-12-05 01:28:58.670631109 +0000 UTC m=+1114.010023408" Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.682690 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5qmj8" podStartSLOduration=3.348527112 podStartE2EDuration="55.682666088s" podCreationTimestamp="2025-12-05 01:28:03 +0000 UTC" firstStartedPulling="2025-12-05 01:28:05.362263173 +0000 UTC m=+1060.701655472" lastFinishedPulling="2025-12-05 01:28:57.696402149 +0000 UTC m=+1113.035794448" observedRunningTime="2025-12-05 01:28:58.665581449 +0000 UTC m=+1114.004973748" watchObservedRunningTime="2025-12-05 01:28:58.682666088 +0000 UTC m=+1114.022058387" Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.762953 4665 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-fgjvz" podStartSLOduration=3.580562737 podStartE2EDuration="55.762934919s" podCreationTimestamp="2025-12-05 01:28:03 +0000 UTC" firstStartedPulling="2025-12-05 01:28:05.480795682 +0000 UTC m=+1060.820187981" lastFinishedPulling="2025-12-05 01:28:57.663167864 +0000 UTC m=+1113.002560163" observedRunningTime="2025-12-05 01:28:58.761415503 +0000 UTC m=+1114.100807812" watchObservedRunningTime="2025-12-05 01:28:58.762934919 +0000 UTC m=+1114.102327218" Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.799421 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-ptwdk" podStartSLOduration=4.60360595 podStartE2EDuration="56.799403423s" podCreationTimestamp="2025-12-05 01:28:02 +0000 UTC" firstStartedPulling="2025-12-05 01:28:05.486198882 +0000 UTC m=+1060.825591181" lastFinishedPulling="2025-12-05 01:28:57.681996355 +0000 UTC m=+1113.021388654" observedRunningTime="2025-12-05 01:28:58.793080432 +0000 UTC m=+1114.132472741" watchObservedRunningTime="2025-12-05 01:28:58.799403423 +0000 UTC m=+1114.138795722" Dec 05 01:28:58 crc kubenswrapper[4665]: I1205 01:28:58.826398 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-sbnx4" podStartSLOduration=4.092161738 podStartE2EDuration="55.826383308s" podCreationTimestamp="2025-12-05 01:28:03 +0000 UTC" firstStartedPulling="2025-12-05 01:28:05.772730183 +0000 UTC m=+1061.112122482" lastFinishedPulling="2025-12-05 01:28:57.506951753 +0000 UTC m=+1112.846344052" observedRunningTime="2025-12-05 01:28:58.824875182 +0000 UTC m=+1114.164267481" watchObservedRunningTime="2025-12-05 01:28:58.826383308 +0000 UTC m=+1114.165775607" Dec 05 01:28:59 crc kubenswrapper[4665]: I1205 01:28:59.325025 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dz2g4" Dec 05 01:29:00 crc kubenswrapper[4665]: I1205 01:29:00.286825 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-79966545b7-vd7h4" Dec 05 01:29:03 crc kubenswrapper[4665]: I1205 01:29:03.105652 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-ptwdk" Dec 05 01:29:03 crc kubenswrapper[4665]: I1205 01:29:03.109551 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-8hblt" Dec 05 01:29:03 crc kubenswrapper[4665]: I1205 01:29:03.234458 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-dz2g4" Dec 05 01:29:03 crc kubenswrapper[4665]: I1205 01:29:03.505797 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-dxk89" Dec 05 01:29:03 crc kubenswrapper[4665]: I1205 01:29:03.577664 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-5qmj8" Dec 05 01:29:03 crc kubenswrapper[4665]: I1205 01:29:03.612493 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-fbmtc" Dec 05 01:29:03 crc kubenswrapper[4665]: I1205 01:29:03.745172 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-fgjvz" Dec 05 01:29:03 crc kubenswrapper[4665]: I1205 01:29:03.815007 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-dnqxs" Dec 05 01:29:04 crc kubenswrapper[4665]: I1205 01:29:04.346453 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-sbnx4" Dec 05 01:29:06 crc kubenswrapper[4665]: I1205 01:29:06.897838 4665 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 01:29:08 crc kubenswrapper[4665]: I1205 01:29:08.383464 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-6dh9j" event={"ID":"f353bd63-e193-4a26-8ba6-32f1eec034a8","Type":"ContainerStarted","Data":"389b19c312653bae5639a2585cc9d47911fa5e7f81668b64d481ab4727f7bbf5"} Dec 05 01:29:08 crc kubenswrapper[4665]: I1205 01:29:08.384059 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-6dh9j" Dec 05 01:29:08 crc kubenswrapper[4665]: I1205 01:29:08.386811 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nfmtb" event={"ID":"5c1d9b52-7fcd-4615-9faa-af55e4165ffb","Type":"ContainerStarted","Data":"d6e8151c3e4b518a2f5abe48afb699166b448b20094ad0c19aa46808b9c678b1"} Dec 05 01:29:08 crc kubenswrapper[4665]: I1205 01:29:08.387072 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nfmtb" Dec 05 01:29:08 crc kubenswrapper[4665]: I1205 01:29:08.413782 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-6dh9j" podStartSLOduration=3.862915965 podStartE2EDuration="1m5.413745144s" podCreationTimestamp="2025-12-05 01:28:03 +0000 UTC" firstStartedPulling="2025-12-05 01:28:05.831533417 +0000 UTC m=+1061.170925716" lastFinishedPulling="2025-12-05 01:29:07.382362586 +0000 UTC m=+1122.721754895" observedRunningTime="2025-12-05 01:29:08.40562585 +0000 UTC m=+1123.745018169" watchObservedRunningTime="2025-12-05 01:29:08.413745144 +0000 UTC m=+1123.753137463" Dec 05 01:29:08 crc kubenswrapper[4665]: I1205 01:29:08.433077 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nfmtb" podStartSLOduration=4.893424657 podStartE2EDuration="1m6.433051057s" podCreationTimestamp="2025-12-05 01:28:02 +0000 UTC" firstStartedPulling="2025-12-05 01:28:05.842749416 +0000 UTC m=+1061.182141715" lastFinishedPulling="2025-12-05 01:29:07.382375806 +0000 UTC m=+1122.721768115" observedRunningTime="2025-12-05 01:29:08.424113262 +0000 UTC m=+1123.763505581" watchObservedRunningTime="2025-12-05 01:29:08.433051057 +0000 UTC m=+1123.772443376" Dec 05 01:29:08 crc kubenswrapper[4665]: I1205 01:29:08.876026 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-57548d458d-lt2sv" 
Dec 05 01:29:09 crc kubenswrapper[4665]: I1205 01:29:09.484280 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9" Dec 05 01:29:10 crc kubenswrapper[4665]: I1205 01:29:10.402668 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-nmpk2" event={"ID":"c874c72f-9ac0-4ce5-bf5c-fc9e983b725c","Type":"ContainerStarted","Data":"0d98b0f84a14cfa47a3eea451e1aec0140f8ad78398dcf4dfdd279c9db9ea9ca"} Dec 05 01:29:10 crc kubenswrapper[4665]: I1205 01:29:10.403486 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-nmpk2" Dec 05 01:29:11 crc kubenswrapper[4665]: I1205 01:29:11.410841 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-bk8hv" event={"ID":"0c289bf9-999d-4396-b15b-b27fded35180","Type":"ContainerStarted","Data":"504a1b820bdfd4875b69563d120abd16b659d9dfc2aee32d2b1a185983a96607"} Dec 05 01:29:11 crc kubenswrapper[4665]: I1205 01:29:11.411384 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-bk8hv" Dec 05 01:29:11 crc kubenswrapper[4665]: I1205 01:29:11.429338 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-nmpk2" podStartSLOduration=4.719268671 podStartE2EDuration="1m8.429323568s" podCreationTimestamp="2025-12-05 01:28:03 +0000 UTC" firstStartedPulling="2025-12-05 01:28:05.719481542 +0000 UTC m=+1061.058873841" lastFinishedPulling="2025-12-05 01:29:09.429536449 +0000 UTC m=+1124.768928738" observedRunningTime="2025-12-05 01:29:10.428508681 +0000 UTC m=+1125.767900980" watchObservedRunningTime="2025-12-05 01:29:11.429323568 +0000 UTC m=+1126.768715867" Dec 05 01:29:11 crc kubenswrapper[4665]: I1205 01:29:11.431170 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-bk8hv" podStartSLOduration=3.8742067799999997 podStartE2EDuration="1m8.431164482s" podCreationTimestamp="2025-12-05 01:28:03 +0000 UTC" firstStartedPulling="2025-12-05 01:28:05.773076801 +0000 UTC m=+1061.112469100" lastFinishedPulling="2025-12-05 01:29:10.330034503 +0000 UTC m=+1125.669426802" observedRunningTime="2025-12-05 01:29:11.42861789 +0000 UTC m=+1126.768010189" watchObservedRunningTime="2025-12-05 01:29:11.431164482 +0000 UTC m=+1126.770556781" Dec 05 01:29:13 crc kubenswrapper[4665]: I1205 01:29:13.540867 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-nfmtb" Dec 05 01:29:14 crc kubenswrapper[4665]: I1205 01:29:14.225333 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-6dh9j" Dec 05 01:29:23 crc kubenswrapper[4665]: I1205 01:29:23.941418 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-nmpk2" Dec 05 01:29:24 crc kubenswrapper[4665]: I1205 01:29:24.573366 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-bk8hv" Dec 05 01:29:43 crc kubenswrapper[4665]: 
I1205 01:29:43.702168 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-w5trh"] Dec 05 01:29:43 crc kubenswrapper[4665]: I1205 01:29:43.703764 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-w5trh" Dec 05 01:29:43 crc kubenswrapper[4665]: I1205 01:29:43.705746 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 05 01:29:43 crc kubenswrapper[4665]: I1205 01:29:43.705786 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 05 01:29:43 crc kubenswrapper[4665]: I1205 01:29:43.706779 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 05 01:29:43 crc kubenswrapper[4665]: I1205 01:29:43.709780 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-bcbwj" Dec 05 01:29:43 crc kubenswrapper[4665]: I1205 01:29:43.712060 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-w5trh"] Dec 05 01:29:43 crc kubenswrapper[4665]: I1205 01:29:43.793556 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-25nch"] Dec 05 01:29:43 crc kubenswrapper[4665]: I1205 01:29:43.795225 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-25nch" Dec 05 01:29:43 crc kubenswrapper[4665]: I1205 01:29:43.803899 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-25nch"] Dec 05 01:29:43 crc kubenswrapper[4665]: I1205 01:29:43.805225 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 05 01:29:43 crc kubenswrapper[4665]: I1205 01:29:43.833216 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86hxk\" (UniqueName: \"kubernetes.io/projected/d1516ca0-74c0-4826-8729-11834caf6031-kube-api-access-86hxk\") pod \"dnsmasq-dns-675f4bcbfc-w5trh\" (UID: \"d1516ca0-74c0-4826-8729-11834caf6031\") " pod="openstack/dnsmasq-dns-675f4bcbfc-w5trh" Dec 05 01:29:43 crc kubenswrapper[4665]: I1205 01:29:43.833324 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1516ca0-74c0-4826-8729-11834caf6031-config\") pod \"dnsmasq-dns-675f4bcbfc-w5trh\" (UID: \"d1516ca0-74c0-4826-8729-11834caf6031\") " pod="openstack/dnsmasq-dns-675f4bcbfc-w5trh" Dec 05 01:29:43 crc kubenswrapper[4665]: I1205 01:29:43.934096 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a24822e3-8f2d-4220-beab-78f1f456b74d-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-25nch\" (UID: \"a24822e3-8f2d-4220-beab-78f1f456b74d\") " pod="openstack/dnsmasq-dns-78dd6ddcc-25nch" Dec 05 01:29:43 crc kubenswrapper[4665]: I1205 01:29:43.934154 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtvz6\" (UniqueName: \"kubernetes.io/projected/a24822e3-8f2d-4220-beab-78f1f456b74d-kube-api-access-mtvz6\") pod \"dnsmasq-dns-78dd6ddcc-25nch\" (UID: \"a24822e3-8f2d-4220-beab-78f1f456b74d\") " pod="openstack/dnsmasq-dns-78dd6ddcc-25nch" Dec 05 01:29:43 crc kubenswrapper[4665]: I1205 01:29:43.934372 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1516ca0-74c0-4826-8729-11834caf6031-config\") pod \"dnsmasq-dns-675f4bcbfc-w5trh\" (UID: \"d1516ca0-74c0-4826-8729-11834caf6031\") " pod="openstack/dnsmasq-dns-675f4bcbfc-w5trh" Dec 05 01:29:43 crc kubenswrapper[4665]: I1205 01:29:43.934464 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a24822e3-8f2d-4220-beab-78f1f456b74d-config\") pod \"dnsmasq-dns-78dd6ddcc-25nch\" (UID: \"a24822e3-8f2d-4220-beab-78f1f456b74d\") " pod="openstack/dnsmasq-dns-78dd6ddcc-25nch" Dec 05 01:29:43 crc kubenswrapper[4665]: I1205 01:29:43.934635 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86hxk\" (UniqueName: \"kubernetes.io/projected/d1516ca0-74c0-4826-8729-11834caf6031-kube-api-access-86hxk\") pod \"dnsmasq-dns-675f4bcbfc-w5trh\" (UID: \"d1516ca0-74c0-4826-8729-11834caf6031\") " pod="openstack/dnsmasq-dns-675f4bcbfc-w5trh" Dec 05 01:29:43 crc kubenswrapper[4665]: I1205 01:29:43.935323 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1516ca0-74c0-4826-8729-11834caf6031-config\") pod \"dnsmasq-dns-675f4bcbfc-w5trh\" (UID: \"d1516ca0-74c0-4826-8729-11834caf6031\") " pod="openstack/dnsmasq-dns-675f4bcbfc-w5trh" Dec 05 01:29:43 crc kubenswrapper[4665]: I1205 01:29:43.961126 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86hxk\" (UniqueName: \"kubernetes.io/projected/d1516ca0-74c0-4826-8729-11834caf6031-kube-api-access-86hxk\") pod \"dnsmasq-dns-675f4bcbfc-w5trh\" (UID: \"d1516ca0-74c0-4826-8729-11834caf6031\") " pod="openstack/dnsmasq-dns-675f4bcbfc-w5trh" Dec 05 01:29:44 crc kubenswrapper[4665]: I1205 01:29:44.019993 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-w5trh" Dec 05 01:29:44 crc kubenswrapper[4665]: I1205 01:29:44.036669 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtvz6\" (UniqueName: \"kubernetes.io/projected/a24822e3-8f2d-4220-beab-78f1f456b74d-kube-api-access-mtvz6\") pod \"dnsmasq-dns-78dd6ddcc-25nch\" (UID: \"a24822e3-8f2d-4220-beab-78f1f456b74d\") " pod="openstack/dnsmasq-dns-78dd6ddcc-25nch" Dec 05 01:29:44 crc kubenswrapper[4665]: I1205 01:29:44.036750 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a24822e3-8f2d-4220-beab-78f1f456b74d-config\") pod \"dnsmasq-dns-78dd6ddcc-25nch\" (UID: \"a24822e3-8f2d-4220-beab-78f1f456b74d\") " pod="openstack/dnsmasq-dns-78dd6ddcc-25nch" Dec 05 01:29:44 crc kubenswrapper[4665]: I1205 01:29:44.036858 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a24822e3-8f2d-4220-beab-78f1f456b74d-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-25nch\" (UID: \"a24822e3-8f2d-4220-beab-78f1f456b74d\") " pod="openstack/dnsmasq-dns-78dd6ddcc-25nch" Dec 05 01:29:44 crc kubenswrapper[4665]: I1205 01:29:44.038396 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a24822e3-8f2d-4220-beab-78f1f456b74d-config\") pod \"dnsmasq-dns-78dd6ddcc-25nch\" (UID: \"a24822e3-8f2d-4220-beab-78f1f456b74d\") " pod="openstack/dnsmasq-dns-78dd6ddcc-25nch" Dec 05 01:29:44 crc kubenswrapper[4665]: I1205 01:29:44.038506 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a24822e3-8f2d-4220-beab-78f1f456b74d-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-25nch\" (UID: \"a24822e3-8f2d-4220-beab-78f1f456b74d\") " pod="openstack/dnsmasq-dns-78dd6ddcc-25nch" Dec 05 01:29:44 crc kubenswrapper[4665]: I1205 01:29:44.060619 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtvz6\" (UniqueName: \"kubernetes.io/projected/a24822e3-8f2d-4220-beab-78f1f456b74d-kube-api-access-mtvz6\") pod \"dnsmasq-dns-78dd6ddcc-25nch\" (UID: \"a24822e3-8f2d-4220-beab-78f1f456b74d\") " pod="openstack/dnsmasq-dns-78dd6ddcc-25nch" Dec 05 01:29:44 crc kubenswrapper[4665]: I1205 01:29:44.112568 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-25nch" Dec 05 01:29:44 crc kubenswrapper[4665]: I1205 01:29:44.532271 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-w5trh"] Dec 05 01:29:44 crc kubenswrapper[4665]: I1205 01:29:44.618174 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-25nch"] Dec 05 01:29:44 crc kubenswrapper[4665]: W1205 01:29:44.626919 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda24822e3_8f2d_4220_beab_78f1f456b74d.slice/crio-c88b3a4cc0de4759668d9df0b278f24c36a6bdaec8bd244f234e8325103e9243 WatchSource:0}: Error finding container c88b3a4cc0de4759668d9df0b278f24c36a6bdaec8bd244f234e8325103e9243: Status 404 returned error can't find the container with id c88b3a4cc0de4759668d9df0b278f24c36a6bdaec8bd244f234e8325103e9243 Dec 05 01:29:44 crc kubenswrapper[4665]: I1205 01:29:44.645807 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-25nch" event={"ID":"a24822e3-8f2d-4220-beab-78f1f456b74d","Type":"ContainerStarted","Data":"c88b3a4cc0de4759668d9df0b278f24c36a6bdaec8bd244f234e8325103e9243"} Dec 05 01:29:44 crc kubenswrapper[4665]: I1205 01:29:44.646918 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-w5trh" event={"ID":"d1516ca0-74c0-4826-8729-11834caf6031","Type":"ContainerStarted","Data":"01e2fcf37a94487b54446b877bcc55767fee6b5d451bc67f34b433577ed28bd0"} Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.015431 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-w5trh"] Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.062898 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-c2b8c"] Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.064796 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-c2b8c" Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.070556 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-c2b8c"] Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.199946 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23bb6b3a-40c6-448f-81ee-de6be2de6d5f-config\") pod \"dnsmasq-dns-666b6646f7-c2b8c\" (UID: \"23bb6b3a-40c6-448f-81ee-de6be2de6d5f\") " pod="openstack/dnsmasq-dns-666b6646f7-c2b8c" Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.200053 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/23bb6b3a-40c6-448f-81ee-de6be2de6d5f-dns-svc\") pod \"dnsmasq-dns-666b6646f7-c2b8c\" (UID: \"23bb6b3a-40c6-448f-81ee-de6be2de6d5f\") " pod="openstack/dnsmasq-dns-666b6646f7-c2b8c" Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.200082 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zh2gj\" (UniqueName: \"kubernetes.io/projected/23bb6b3a-40c6-448f-81ee-de6be2de6d5f-kube-api-access-zh2gj\") pod \"dnsmasq-dns-666b6646f7-c2b8c\" (UID: \"23bb6b3a-40c6-448f-81ee-de6be2de6d5f\") " pod="openstack/dnsmasq-dns-666b6646f7-c2b8c" Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.301008 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/23bb6b3a-40c6-448f-81ee-de6be2de6d5f-dns-svc\") pod \"dnsmasq-dns-666b6646f7-c2b8c\" (UID: \"23bb6b3a-40c6-448f-81ee-de6be2de6d5f\") " pod="openstack/dnsmasq-dns-666b6646f7-c2b8c" Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.301065 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zh2gj\" (UniqueName: \"kubernetes.io/projected/23bb6b3a-40c6-448f-81ee-de6be2de6d5f-kube-api-access-zh2gj\") pod \"dnsmasq-dns-666b6646f7-c2b8c\" (UID: \"23bb6b3a-40c6-448f-81ee-de6be2de6d5f\") " pod="openstack/dnsmasq-dns-666b6646f7-c2b8c" Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.301100 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23bb6b3a-40c6-448f-81ee-de6be2de6d5f-config\") pod \"dnsmasq-dns-666b6646f7-c2b8c\" (UID: \"23bb6b3a-40c6-448f-81ee-de6be2de6d5f\") " pod="openstack/dnsmasq-dns-666b6646f7-c2b8c" Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.302119 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23bb6b3a-40c6-448f-81ee-de6be2de6d5f-config\") pod \"dnsmasq-dns-666b6646f7-c2b8c\" (UID: \"23bb6b3a-40c6-448f-81ee-de6be2de6d5f\") " pod="openstack/dnsmasq-dns-666b6646f7-c2b8c" Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.302185 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/23bb6b3a-40c6-448f-81ee-de6be2de6d5f-dns-svc\") pod \"dnsmasq-dns-666b6646f7-c2b8c\" (UID: \"23bb6b3a-40c6-448f-81ee-de6be2de6d5f\") " pod="openstack/dnsmasq-dns-666b6646f7-c2b8c" Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.348677 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zh2gj\" (UniqueName: 
\"kubernetes.io/projected/23bb6b3a-40c6-448f-81ee-de6be2de6d5f-kube-api-access-zh2gj\") pod \"dnsmasq-dns-666b6646f7-c2b8c\" (UID: \"23bb6b3a-40c6-448f-81ee-de6be2de6d5f\") " pod="openstack/dnsmasq-dns-666b6646f7-c2b8c" Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.413638 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-c2b8c" Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.542975 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-25nch"] Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.585194 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-5pbm6"] Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.595160 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-5pbm6" Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.604159 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-5pbm6"] Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.708985 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b76b2027-5c2b-478f-9481-fc5c07a7bcf7-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-5pbm6\" (UID: \"b76b2027-5c2b-478f-9481-fc5c07a7bcf7\") " pod="openstack/dnsmasq-dns-57d769cc4f-5pbm6" Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.709501 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-brw2p\" (UniqueName: \"kubernetes.io/projected/b76b2027-5c2b-478f-9481-fc5c07a7bcf7-kube-api-access-brw2p\") pod \"dnsmasq-dns-57d769cc4f-5pbm6\" (UID: \"b76b2027-5c2b-478f-9481-fc5c07a7bcf7\") " pod="openstack/dnsmasq-dns-57d769cc4f-5pbm6" Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.709553 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b76b2027-5c2b-478f-9481-fc5c07a7bcf7-config\") pod \"dnsmasq-dns-57d769cc4f-5pbm6\" (UID: \"b76b2027-5c2b-478f-9481-fc5c07a7bcf7\") " pod="openstack/dnsmasq-dns-57d769cc4f-5pbm6" Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.810965 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-brw2p\" (UniqueName: \"kubernetes.io/projected/b76b2027-5c2b-478f-9481-fc5c07a7bcf7-kube-api-access-brw2p\") pod \"dnsmasq-dns-57d769cc4f-5pbm6\" (UID: \"b76b2027-5c2b-478f-9481-fc5c07a7bcf7\") " pod="openstack/dnsmasq-dns-57d769cc4f-5pbm6" Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.811044 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b76b2027-5c2b-478f-9481-fc5c07a7bcf7-config\") pod \"dnsmasq-dns-57d769cc4f-5pbm6\" (UID: \"b76b2027-5c2b-478f-9481-fc5c07a7bcf7\") " pod="openstack/dnsmasq-dns-57d769cc4f-5pbm6" Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.811111 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b76b2027-5c2b-478f-9481-fc5c07a7bcf7-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-5pbm6\" (UID: \"b76b2027-5c2b-478f-9481-fc5c07a7bcf7\") " pod="openstack/dnsmasq-dns-57d769cc4f-5pbm6" Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.812039 4665 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b76b2027-5c2b-478f-9481-fc5c07a7bcf7-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-5pbm6\" (UID: \"b76b2027-5c2b-478f-9481-fc5c07a7bcf7\") " pod="openstack/dnsmasq-dns-57d769cc4f-5pbm6" Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.813049 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b76b2027-5c2b-478f-9481-fc5c07a7bcf7-config\") pod \"dnsmasq-dns-57d769cc4f-5pbm6\" (UID: \"b76b2027-5c2b-478f-9481-fc5c07a7bcf7\") " pod="openstack/dnsmasq-dns-57d769cc4f-5pbm6" Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.912175 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-brw2p\" (UniqueName: \"kubernetes.io/projected/b76b2027-5c2b-478f-9481-fc5c07a7bcf7-kube-api-access-brw2p\") pod \"dnsmasq-dns-57d769cc4f-5pbm6\" (UID: \"b76b2027-5c2b-478f-9481-fc5c07a7bcf7\") " pod="openstack/dnsmasq-dns-57d769cc4f-5pbm6" Dec 05 01:29:47 crc kubenswrapper[4665]: I1205 01:29:47.934579 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-5pbm6" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.277362 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-c2b8c"] Dec 05 01:29:48 crc kubenswrapper[4665]: W1205 01:29:48.290741 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod23bb6b3a_40c6_448f_81ee_de6be2de6d5f.slice/crio-6b4ed48fc8fffe5d6939564ed4e3d978e45c1710f2f27bafb38814c44a751f55 WatchSource:0}: Error finding container 6b4ed48fc8fffe5d6939564ed4e3d978e45c1710f2f27bafb38814c44a751f55: Status 404 returned error can't find the container with id 6b4ed48fc8fffe5d6939564ed4e3d978e45c1710f2f27bafb38814c44a751f55 Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.585370 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-5pbm6"] Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.680138 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.681432 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.685515 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.685683 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.686027 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.686136 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-9fcvg" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.686311 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.686628 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.690369 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.707956 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.712084 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-5pbm6" event={"ID":"b76b2027-5c2b-478f-9481-fc5c07a7bcf7","Type":"ContainerStarted","Data":"8b4e64909b28b0e579c21c0d2ee8b6347bc0bdd332251e4fa4c0441ec90393d6"} Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.713436 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-c2b8c" event={"ID":"23bb6b3a-40c6-448f-81ee-de6be2de6d5f","Type":"ContainerStarted","Data":"6b4ed48fc8fffe5d6939564ed4e3d978e45c1710f2f27bafb38814c44a751f55"} Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.748692 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.749911 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.756075 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.758767 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-mfvvg" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.758925 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.759004 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.759085 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.759081 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.759254 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.759411 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.831522 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5j4s\" (UniqueName: \"kubernetes.io/projected/82ad13d8-7710-4135-9822-a96d62650e6d-kube-api-access-f5j4s\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.831699 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/753728b2-97f7-4b79-8daf-19e01260d537-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.831792 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/82ad13d8-7710-4135-9822-a96d62650e6d-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.831836 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/753728b2-97f7-4b79-8daf-19e01260d537-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.831899 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.832069 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/82ad13d8-7710-4135-9822-a96d62650e6d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.832230 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/753728b2-97f7-4b79-8daf-19e01260d537-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.832389 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/82ad13d8-7710-4135-9822-a96d62650e6d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.832412 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/753728b2-97f7-4b79-8daf-19e01260d537-config-data\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.832542 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/82ad13d8-7710-4135-9822-a96d62650e6d-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.832800 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/82ad13d8-7710-4135-9822-a96d62650e6d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.832828 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/82ad13d8-7710-4135-9822-a96d62650e6d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.845797 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/753728b2-97f7-4b79-8daf-19e01260d537-pod-info\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.845893 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.846166 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: 
\"kubernetes.io/empty-dir/753728b2-97f7-4b79-8daf-19e01260d537-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.846353 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/753728b2-97f7-4b79-8daf-19e01260d537-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.846392 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/82ad13d8-7710-4135-9822-a96d62650e6d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.846429 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/82ad13d8-7710-4135-9822-a96d62650e6d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.846467 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/82ad13d8-7710-4135-9822-a96d62650e6d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.846488 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/753728b2-97f7-4b79-8daf-19e01260d537-server-conf\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.846508 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/753728b2-97f7-4b79-8daf-19e01260d537-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.846537 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p4s4v\" (UniqueName: \"kubernetes.io/projected/753728b2-97f7-4b79-8daf-19e01260d537-kube-api-access-p4s4v\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.948319 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/753728b2-97f7-4b79-8daf-19e01260d537-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.951000 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: 
\"kubernetes.io/secret/82ad13d8-7710-4135-9822-a96d62650e6d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.951049 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/82ad13d8-7710-4135-9822-a96d62650e6d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.951079 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/82ad13d8-7710-4135-9822-a96d62650e6d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.951129 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/753728b2-97f7-4b79-8daf-19e01260d537-server-conf\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.951147 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/753728b2-97f7-4b79-8daf-19e01260d537-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.951172 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p4s4v\" (UniqueName: \"kubernetes.io/projected/753728b2-97f7-4b79-8daf-19e01260d537-kube-api-access-p4s4v\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.951224 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5j4s\" (UniqueName: \"kubernetes.io/projected/82ad13d8-7710-4135-9822-a96d62650e6d-kube-api-access-f5j4s\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.951246 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/753728b2-97f7-4b79-8daf-19e01260d537-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.951268 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/82ad13d8-7710-4135-9822-a96d62650e6d-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.951284 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/753728b2-97f7-4b79-8daf-19e01260d537-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") 
" pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.951319 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.951336 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/82ad13d8-7710-4135-9822-a96d62650e6d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.951372 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/753728b2-97f7-4b79-8daf-19e01260d537-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.951409 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/82ad13d8-7710-4135-9822-a96d62650e6d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.951437 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/753728b2-97f7-4b79-8daf-19e01260d537-config-data\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.951451 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/82ad13d8-7710-4135-9822-a96d62650e6d-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.951474 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/82ad13d8-7710-4135-9822-a96d62650e6d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.951490 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/82ad13d8-7710-4135-9822-a96d62650e6d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.951523 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/753728b2-97f7-4b79-8daf-19e01260d537-pod-info\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.951541 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.951561 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/753728b2-97f7-4b79-8daf-19e01260d537-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.952418 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/753728b2-97f7-4b79-8daf-19e01260d537-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.952534 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/82ad13d8-7710-4135-9822-a96d62650e6d-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.952637 4665 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.953979 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/82ad13d8-7710-4135-9822-a96d62650e6d-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.954120 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/753728b2-97f7-4b79-8daf-19e01260d537-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.954784 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/753728b2-97f7-4b79-8daf-19e01260d537-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.955125 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/82ad13d8-7710-4135-9822-a96d62650e6d-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.956073 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/82ad13d8-7710-4135-9822-a96d62650e6d-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: 
Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.957939 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/753728b2-97f7-4b79-8daf-19e01260d537-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0"
Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.958935 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/82ad13d8-7710-4135-9822-a96d62650e6d-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.959280 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/82ad13d8-7710-4135-9822-a96d62650e6d-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.961477 4665 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0"
Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.962196 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/753728b2-97f7-4b79-8daf-19e01260d537-config-data\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0"
Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.964488 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/753728b2-97f7-4b79-8daf-19e01260d537-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0"
Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.966496 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/753728b2-97f7-4b79-8daf-19e01260d537-pod-info\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0"
Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.969936 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/82ad13d8-7710-4135-9822-a96d62650e6d-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.970043 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/753728b2-97f7-4b79-8daf-19e01260d537-server-conf\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0"
Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.970174 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/753728b2-97f7-4b79-8daf-19e01260d537-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " 
pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.977940 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/82ad13d8-7710-4135-9822-a96d62650e6d-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.979448 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/82ad13d8-7710-4135-9822-a96d62650e6d-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.980117 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p4s4v\" (UniqueName: \"kubernetes.io/projected/753728b2-97f7-4b79-8daf-19e01260d537-kube-api-access-p4s4v\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.983946 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5j4s\" (UniqueName: \"kubernetes.io/projected/82ad13d8-7710-4135-9822-a96d62650e6d-kube-api-access-f5j4s\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:48 crc kubenswrapper[4665]: I1205 01:29:48.996910 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " pod="openstack/rabbitmq-server-0" Dec 05 01:29:49 crc kubenswrapper[4665]: I1205 01:29:49.009033 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 01:29:49 crc kubenswrapper[4665]: I1205 01:29:49.009620 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:49 crc kubenswrapper[4665]: I1205 01:29:49.088149 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:29:49 crc kubenswrapper[4665]: I1205 01:29:49.644529 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 01:29:49 crc kubenswrapper[4665]: W1205 01:29:49.657360 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod82ad13d8_7710_4135_9822_a96d62650e6d.slice/crio-92f2fcaf28e141f0c19b52536ccfbf6fa53653202bdc54ea4b7fd5c6c7d13d99 WatchSource:0}: Error finding container 92f2fcaf28e141f0c19b52536ccfbf6fa53653202bdc54ea4b7fd5c6c7d13d99: Status 404 returned error can't find the container with id 92f2fcaf28e141f0c19b52536ccfbf6fa53653202bdc54ea4b7fd5c6c7d13d99 Dec 05 01:29:49 crc kubenswrapper[4665]: I1205 01:29:49.708844 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 01:29:49 crc kubenswrapper[4665]: W1205 01:29:49.722925 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod753728b2_97f7_4b79_8daf_19e01260d537.slice/crio-e5816000a33b0f5880cccc9f1eff44840e38e704e90e4b2bc6c9d98309229bcc WatchSource:0}: Error finding container e5816000a33b0f5880cccc9f1eff44840e38e704e90e4b2bc6c9d98309229bcc: Status 404 returned error can't find the container with id e5816000a33b0f5880cccc9f1eff44840e38e704e90e4b2bc6c9d98309229bcc Dec 05 01:29:49 crc kubenswrapper[4665]: I1205 01:29:49.750977 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"82ad13d8-7710-4135-9822-a96d62650e6d","Type":"ContainerStarted","Data":"92f2fcaf28e141f0c19b52536ccfbf6fa53653202bdc54ea4b7fd5c6c7d13d99"} Dec 05 01:29:49 crc kubenswrapper[4665]: I1205 01:29:49.949221 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 05 01:29:49 crc kubenswrapper[4665]: I1205 01:29:49.951062 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 01:29:49 crc kubenswrapper[4665]: I1205 01:29:49.956620 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 05 01:29:49 crc kubenswrapper[4665]: I1205 01:29:49.957006 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 05 01:29:49 crc kubenswrapper[4665]: I1205 01:29:49.958446 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 05 01:29:49 crc kubenswrapper[4665]: I1205 01:29:49.969448 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 05 01:29:49 crc kubenswrapper[4665]: I1205 01:29:49.970124 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-b4dgk" Dec 05 01:29:49 crc kubenswrapper[4665]: I1205 01:29:49.992634 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.084364 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/423b314f-ea29-4705-8447-3a316edd8c6b-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"423b314f-ea29-4705-8447-3a316edd8c6b\") " pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.084415 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/423b314f-ea29-4705-8447-3a316edd8c6b-config-data-default\") pod \"openstack-galera-0\" (UID: \"423b314f-ea29-4705-8447-3a316edd8c6b\") " pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.084438 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/423b314f-ea29-4705-8447-3a316edd8c6b-kolla-config\") pod \"openstack-galera-0\" (UID: \"423b314f-ea29-4705-8447-3a316edd8c6b\") " pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.084455 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mt5ds\" (UniqueName: \"kubernetes.io/projected/423b314f-ea29-4705-8447-3a316edd8c6b-kube-api-access-mt5ds\") pod \"openstack-galera-0\" (UID: \"423b314f-ea29-4705-8447-3a316edd8c6b\") " pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.084518 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"423b314f-ea29-4705-8447-3a316edd8c6b\") " pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.084543 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/423b314f-ea29-4705-8447-3a316edd8c6b-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"423b314f-ea29-4705-8447-3a316edd8c6b\") " pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.084573 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/423b314f-ea29-4705-8447-3a316edd8c6b-config-data-generated\") pod \"openstack-galera-0\" (UID: \"423b314f-ea29-4705-8447-3a316edd8c6b\") " pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.084603 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/423b314f-ea29-4705-8447-3a316edd8c6b-operator-scripts\") pod \"openstack-galera-0\" (UID: \"423b314f-ea29-4705-8447-3a316edd8c6b\") " pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.190341 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"423b314f-ea29-4705-8447-3a316edd8c6b\") " pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.190423 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/423b314f-ea29-4705-8447-3a316edd8c6b-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"423b314f-ea29-4705-8447-3a316edd8c6b\") " pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.190468 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/423b314f-ea29-4705-8447-3a316edd8c6b-config-data-generated\") pod \"openstack-galera-0\" (UID: \"423b314f-ea29-4705-8447-3a316edd8c6b\") " pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.190505 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/423b314f-ea29-4705-8447-3a316edd8c6b-operator-scripts\") pod \"openstack-galera-0\" (UID: \"423b314f-ea29-4705-8447-3a316edd8c6b\") " pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.190547 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/423b314f-ea29-4705-8447-3a316edd8c6b-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"423b314f-ea29-4705-8447-3a316edd8c6b\") " pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.190572 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/423b314f-ea29-4705-8447-3a316edd8c6b-kolla-config\") pod \"openstack-galera-0\" (UID: \"423b314f-ea29-4705-8447-3a316edd8c6b\") " pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.190588 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/423b314f-ea29-4705-8447-3a316edd8c6b-config-data-default\") pod \"openstack-galera-0\" (UID: \"423b314f-ea29-4705-8447-3a316edd8c6b\") " pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.190607 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mt5ds\" (UniqueName: \"kubernetes.io/projected/423b314f-ea29-4705-8447-3a316edd8c6b-kube-api-access-mt5ds\") pod \"openstack-galera-0\" (UID: 
\"423b314f-ea29-4705-8447-3a316edd8c6b\") " pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.192624 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/423b314f-ea29-4705-8447-3a316edd8c6b-kolla-config\") pod \"openstack-galera-0\" (UID: \"423b314f-ea29-4705-8447-3a316edd8c6b\") " pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.193473 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/423b314f-ea29-4705-8447-3a316edd8c6b-operator-scripts\") pod \"openstack-galera-0\" (UID: \"423b314f-ea29-4705-8447-3a316edd8c6b\") " pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.193601 4665 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"423b314f-ea29-4705-8447-3a316edd8c6b\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.193962 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/423b314f-ea29-4705-8447-3a316edd8c6b-config-data-generated\") pod \"openstack-galera-0\" (UID: \"423b314f-ea29-4705-8447-3a316edd8c6b\") " pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.200509 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/423b314f-ea29-4705-8447-3a316edd8c6b-config-data-default\") pod \"openstack-galera-0\" (UID: \"423b314f-ea29-4705-8447-3a316edd8c6b\") " pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.208555 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/423b314f-ea29-4705-8447-3a316edd8c6b-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"423b314f-ea29-4705-8447-3a316edd8c6b\") " pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.209005 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/423b314f-ea29-4705-8447-3a316edd8c6b-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"423b314f-ea29-4705-8447-3a316edd8c6b\") " pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.211972 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mt5ds\" (UniqueName: \"kubernetes.io/projected/423b314f-ea29-4705-8447-3a316edd8c6b-kube-api-access-mt5ds\") pod \"openstack-galera-0\" (UID: \"423b314f-ea29-4705-8447-3a316edd8c6b\") " pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.272269 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"423b314f-ea29-4705-8447-3a316edd8c6b\") " pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.289240 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 01:29:50 crc kubenswrapper[4665]: I1205 01:29:50.816285 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"753728b2-97f7-4b79-8daf-19e01260d537","Type":"ContainerStarted","Data":"e5816000a33b0f5880cccc9f1eff44840e38e704e90e4b2bc6c9d98309229bcc"} Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.161605 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.337113 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.341382 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.357101 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-6jfdx" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.357512 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.357664 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.357946 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.388884 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.432579 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.432637 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fk2wf\" (UniqueName: \"kubernetes.io/projected/1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6-kube-api-access-fk2wf\") pod \"openstack-cell1-galera-0\" (UID: \"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.432671 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.432702 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.432736 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.432767 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.432818 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.432857 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.535132 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.535193 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.535239 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.535269 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.535471 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.535535 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.535604 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fk2wf\" (UniqueName: \"kubernetes.io/projected/1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6-kube-api-access-fk2wf\") pod \"openstack-cell1-galera-0\" (UID: \"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.535635 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.537877 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.538586 4665 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.538500 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.539186 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.545346 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.546371 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.546540 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6-galera-tls-certs\") pod 
\"openstack-cell1-galera-0\" (UID: \"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.566183 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fk2wf\" (UniqueName: \"kubernetes.io/projected/1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6-kube-api-access-fk2wf\") pod \"openstack-cell1-galera-0\" (UID: \"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.575408 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-cell1-galera-0\" (UID: \"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6\") " pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.682284 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.740926 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.741873 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.746992 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-khqcf" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.747388 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.747564 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.758946 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.848050 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/02847117-544d-400b-b9a0-4d10200e0f0d-memcached-tls-certs\") pod \"memcached-0\" (UID: \"02847117-544d-400b-b9a0-4d10200e0f0d\") " pod="openstack/memcached-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.848164 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/02847117-544d-400b-b9a0-4d10200e0f0d-kolla-config\") pod \"memcached-0\" (UID: \"02847117-544d-400b-b9a0-4d10200e0f0d\") " pod="openstack/memcached-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.848209 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/02847117-544d-400b-b9a0-4d10200e0f0d-config-data\") pod \"memcached-0\" (UID: \"02847117-544d-400b-b9a0-4d10200e0f0d\") " pod="openstack/memcached-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.848241 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27xvr\" (UniqueName: \"kubernetes.io/projected/02847117-544d-400b-b9a0-4d10200e0f0d-kube-api-access-27xvr\") pod \"memcached-0\" (UID: \"02847117-544d-400b-b9a0-4d10200e0f0d\") " pod="openstack/memcached-0" Dec 05 01:29:51 crc kubenswrapper[4665]: 
I1205 01:29:51.848262 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02847117-544d-400b-b9a0-4d10200e0f0d-combined-ca-bundle\") pod \"memcached-0\" (UID: \"02847117-544d-400b-b9a0-4d10200e0f0d\") " pod="openstack/memcached-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.872217 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"423b314f-ea29-4705-8447-3a316edd8c6b","Type":"ContainerStarted","Data":"968f986941c948285b216ecf4448c156516617ed68ed4fe5f8d787b54571218e"} Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.949566 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/02847117-544d-400b-b9a0-4d10200e0f0d-kolla-config\") pod \"memcached-0\" (UID: \"02847117-544d-400b-b9a0-4d10200e0f0d\") " pod="openstack/memcached-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.949632 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/02847117-544d-400b-b9a0-4d10200e0f0d-config-data\") pod \"memcached-0\" (UID: \"02847117-544d-400b-b9a0-4d10200e0f0d\") " pod="openstack/memcached-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.949656 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27xvr\" (UniqueName: \"kubernetes.io/projected/02847117-544d-400b-b9a0-4d10200e0f0d-kube-api-access-27xvr\") pod \"memcached-0\" (UID: \"02847117-544d-400b-b9a0-4d10200e0f0d\") " pod="openstack/memcached-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.949673 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02847117-544d-400b-b9a0-4d10200e0f0d-combined-ca-bundle\") pod \"memcached-0\" (UID: \"02847117-544d-400b-b9a0-4d10200e0f0d\") " pod="openstack/memcached-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.949716 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/02847117-544d-400b-b9a0-4d10200e0f0d-memcached-tls-certs\") pod \"memcached-0\" (UID: \"02847117-544d-400b-b9a0-4d10200e0f0d\") " pod="openstack/memcached-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.951004 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/02847117-544d-400b-b9a0-4d10200e0f0d-kolla-config\") pod \"memcached-0\" (UID: \"02847117-544d-400b-b9a0-4d10200e0f0d\") " pod="openstack/memcached-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.951114 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/02847117-544d-400b-b9a0-4d10200e0f0d-config-data\") pod \"memcached-0\" (UID: \"02847117-544d-400b-b9a0-4d10200e0f0d\") " pod="openstack/memcached-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.966966 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02847117-544d-400b-b9a0-4d10200e0f0d-combined-ca-bundle\") pod \"memcached-0\" (UID: \"02847117-544d-400b-b9a0-4d10200e0f0d\") " pod="openstack/memcached-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.967021 4665 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/02847117-544d-400b-b9a0-4d10200e0f0d-memcached-tls-certs\") pod \"memcached-0\" (UID: \"02847117-544d-400b-b9a0-4d10200e0f0d\") " pod="openstack/memcached-0" Dec 05 01:29:51 crc kubenswrapper[4665]: I1205 01:29:51.973138 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27xvr\" (UniqueName: \"kubernetes.io/projected/02847117-544d-400b-b9a0-4d10200e0f0d-kube-api-access-27xvr\") pod \"memcached-0\" (UID: \"02847117-544d-400b-b9a0-4d10200e0f0d\") " pod="openstack/memcached-0" Dec 05 01:29:52 crc kubenswrapper[4665]: I1205 01:29:52.079704 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 05 01:29:52 crc kubenswrapper[4665]: I1205 01:29:52.707106 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 01:29:52 crc kubenswrapper[4665]: I1205 01:29:52.751952 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 01:29:52 crc kubenswrapper[4665]: W1205 01:29:52.843866 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod02847117_544d_400b_b9a0_4d10200e0f0d.slice/crio-149a0dfe83271ba9196acb82c98536c832bb8c745814b2f4ed6b97dd9b66a00e WatchSource:0}: Error finding container 149a0dfe83271ba9196acb82c98536c832bb8c745814b2f4ed6b97dd9b66a00e: Status 404 returned error can't find the container with id 149a0dfe83271ba9196acb82c98536c832bb8c745814b2f4ed6b97dd9b66a00e Dec 05 01:29:52 crc kubenswrapper[4665]: I1205 01:29:52.910494 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"02847117-544d-400b-b9a0-4d10200e0f0d","Type":"ContainerStarted","Data":"149a0dfe83271ba9196acb82c98536c832bb8c745814b2f4ed6b97dd9b66a00e"} Dec 05 01:29:52 crc kubenswrapper[4665]: I1205 01:29:52.910541 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6","Type":"ContainerStarted","Data":"cbf5f4fce4aa8ce6769ecddd6f4e6aeb95ac74096b8c55144a2cf561ed427645"} Dec 05 01:29:53 crc kubenswrapper[4665]: I1205 01:29:53.872321 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 01:29:53 crc kubenswrapper[4665]: I1205 01:29:53.875385 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 01:29:53 crc kubenswrapper[4665]: I1205 01:29:53.878157 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-prprz" Dec 05 01:29:53 crc kubenswrapper[4665]: I1205 01:29:53.908562 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 01:29:54 crc kubenswrapper[4665]: I1205 01:29:53.999426 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qmqjz\" (UniqueName: \"kubernetes.io/projected/d40b331f-9046-43a5-92e9-89ac2b567043-kube-api-access-qmqjz\") pod \"kube-state-metrics-0\" (UID: \"d40b331f-9046-43a5-92e9-89ac2b567043\") " pod="openstack/kube-state-metrics-0" Dec 05 01:29:54 crc kubenswrapper[4665]: I1205 01:29:54.103422 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qmqjz\" (UniqueName: \"kubernetes.io/projected/d40b331f-9046-43a5-92e9-89ac2b567043-kube-api-access-qmqjz\") pod \"kube-state-metrics-0\" (UID: \"d40b331f-9046-43a5-92e9-89ac2b567043\") " pod="openstack/kube-state-metrics-0" Dec 05 01:29:54 crc kubenswrapper[4665]: I1205 01:29:54.157707 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qmqjz\" (UniqueName: \"kubernetes.io/projected/d40b331f-9046-43a5-92e9-89ac2b567043-kube-api-access-qmqjz\") pod \"kube-state-metrics-0\" (UID: \"d40b331f-9046-43a5-92e9-89ac2b567043\") " pod="openstack/kube-state-metrics-0" Dec 05 01:29:54 crc kubenswrapper[4665]: I1205 01:29:54.213751 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 01:29:55 crc kubenswrapper[4665]: I1205 01:29:55.139762 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 01:29:55 crc kubenswrapper[4665]: W1205 01:29:55.189792 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd40b331f_9046_43a5_92e9_89ac2b567043.slice/crio-f0c5a153ceaa47f88c62a55ce0a4ffc182fce3e4547e4468be38dc4a6e81c5c3 WatchSource:0}: Error finding container f0c5a153ceaa47f88c62a55ce0a4ffc182fce3e4547e4468be38dc4a6e81c5c3: Status 404 returned error can't find the container with id f0c5a153ceaa47f88c62a55ce0a4ffc182fce3e4547e4468be38dc4a6e81c5c3 Dec 05 01:29:56 crc kubenswrapper[4665]: I1205 01:29:56.000171 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d40b331f-9046-43a5-92e9-89ac2b567043","Type":"ContainerStarted","Data":"f0c5a153ceaa47f88c62a55ce0a4ffc182fce3e4547e4468be38dc4a6e81c5c3"} Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.629971 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.631799 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.636513 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-5k52d" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.639384 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.640326 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.640504 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.642586 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.647741 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.685451 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/df5300cc-4ce3-4574-a775-595607aeddb6-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"df5300cc-4ce3-4574-a775-595607aeddb6\") " pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.685684 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df5300cc-4ce3-4574-a775-595607aeddb6-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"df5300cc-4ce3-4574-a775-595607aeddb6\") " pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.685828 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/df5300cc-4ce3-4574-a775-595607aeddb6-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"df5300cc-4ce3-4574-a775-595607aeddb6\") " pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.685900 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df5300cc-4ce3-4574-a775-595607aeddb6-config\") pod \"ovsdbserver-nb-0\" (UID: \"df5300cc-4ce3-4574-a775-595607aeddb6\") " pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.685920 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-nb-0\" (UID: \"df5300cc-4ce3-4574-a775-595607aeddb6\") " pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.685941 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/df5300cc-4ce3-4574-a775-595607aeddb6-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"df5300cc-4ce3-4574-a775-595607aeddb6\") " pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.686083 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8jn6\" 
(UniqueName: \"kubernetes.io/projected/df5300cc-4ce3-4574-a775-595607aeddb6-kube-api-access-d8jn6\") pod \"ovsdbserver-nb-0\" (UID: \"df5300cc-4ce3-4574-a775-595607aeddb6\") " pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.686218 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/df5300cc-4ce3-4574-a775-595607aeddb6-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"df5300cc-4ce3-4574-a775-595607aeddb6\") " pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.787821 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8jn6\" (UniqueName: \"kubernetes.io/projected/df5300cc-4ce3-4574-a775-595607aeddb6-kube-api-access-d8jn6\") pod \"ovsdbserver-nb-0\" (UID: \"df5300cc-4ce3-4574-a775-595607aeddb6\") " pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.787941 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/df5300cc-4ce3-4574-a775-595607aeddb6-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"df5300cc-4ce3-4574-a775-595607aeddb6\") " pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.788018 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/df5300cc-4ce3-4574-a775-595607aeddb6-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"df5300cc-4ce3-4574-a775-595607aeddb6\") " pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.788277 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df5300cc-4ce3-4574-a775-595607aeddb6-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"df5300cc-4ce3-4574-a775-595607aeddb6\") " pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.788509 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/df5300cc-4ce3-4574-a775-595607aeddb6-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"df5300cc-4ce3-4574-a775-595607aeddb6\") " pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.788561 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df5300cc-4ce3-4574-a775-595607aeddb6-config\") pod \"ovsdbserver-nb-0\" (UID: \"df5300cc-4ce3-4574-a775-595607aeddb6\") " pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.788585 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-nb-0\" (UID: \"df5300cc-4ce3-4574-a775-595607aeddb6\") " pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.788606 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/df5300cc-4ce3-4574-a775-595607aeddb6-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"df5300cc-4ce3-4574-a775-595607aeddb6\") " pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 
01:29:57.789639 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/df5300cc-4ce3-4574-a775-595607aeddb6-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"df5300cc-4ce3-4574-a775-595607aeddb6\") " pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.795159 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/df5300cc-4ce3-4574-a775-595607aeddb6-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"df5300cc-4ce3-4574-a775-595607aeddb6\") " pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.795636 4665 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-nb-0\" (UID: \"df5300cc-4ce3-4574-a775-595607aeddb6\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.796961 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/df5300cc-4ce3-4574-a775-595607aeddb6-config\") pod \"ovsdbserver-nb-0\" (UID: \"df5300cc-4ce3-4574-a775-595607aeddb6\") " pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.798053 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/df5300cc-4ce3-4574-a775-595607aeddb6-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"df5300cc-4ce3-4574-a775-595607aeddb6\") " pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.799164 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/df5300cc-4ce3-4574-a775-595607aeddb6-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"df5300cc-4ce3-4574-a775-595607aeddb6\") " pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.800323 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/df5300cc-4ce3-4574-a775-595607aeddb6-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"df5300cc-4ce3-4574-a775-595607aeddb6\") " pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.808490 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8jn6\" (UniqueName: \"kubernetes.io/projected/df5300cc-4ce3-4574-a775-595607aeddb6-kube-api-access-d8jn6\") pod \"ovsdbserver-nb-0\" (UID: \"df5300cc-4ce3-4574-a775-595607aeddb6\") " pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.815286 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ovsdbserver-nb-0\" (UID: \"df5300cc-4ce3-4574-a775-595607aeddb6\") " pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:57 crc kubenswrapper[4665]: I1205 01:29:57.971752 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.678990 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-zr2m2"] Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.680173 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-zr2m2" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.684104 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-6hz2z" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.684511 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.684567 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.690653 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-zr2m2"] Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.716632 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/41de0d44-b33f-43c4-a6c1-54830596874b-var-run\") pod \"ovn-controller-zr2m2\" (UID: \"41de0d44-b33f-43c4-a6c1-54830596874b\") " pod="openstack/ovn-controller-zr2m2" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.716695 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41de0d44-b33f-43c4-a6c1-54830596874b-combined-ca-bundle\") pod \"ovn-controller-zr2m2\" (UID: \"41de0d44-b33f-43c4-a6c1-54830596874b\") " pod="openstack/ovn-controller-zr2m2" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.716727 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzpst\" (UniqueName: \"kubernetes.io/projected/41de0d44-b33f-43c4-a6c1-54830596874b-kube-api-access-wzpst\") pod \"ovn-controller-zr2m2\" (UID: \"41de0d44-b33f-43c4-a6c1-54830596874b\") " pod="openstack/ovn-controller-zr2m2" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.716747 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/41de0d44-b33f-43c4-a6c1-54830596874b-scripts\") pod \"ovn-controller-zr2m2\" (UID: \"41de0d44-b33f-43c4-a6c1-54830596874b\") " pod="openstack/ovn-controller-zr2m2" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.716768 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/41de0d44-b33f-43c4-a6c1-54830596874b-var-run-ovn\") pod \"ovn-controller-zr2m2\" (UID: \"41de0d44-b33f-43c4-a6c1-54830596874b\") " pod="openstack/ovn-controller-zr2m2" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.716787 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/41de0d44-b33f-43c4-a6c1-54830596874b-ovn-controller-tls-certs\") pod \"ovn-controller-zr2m2\" (UID: \"41de0d44-b33f-43c4-a6c1-54830596874b\") " pod="openstack/ovn-controller-zr2m2" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.716809 4665 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/41de0d44-b33f-43c4-a6c1-54830596874b-var-log-ovn\") pod \"ovn-controller-zr2m2\" (UID: \"41de0d44-b33f-43c4-a6c1-54830596874b\") " pod="openstack/ovn-controller-zr2m2" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.752031 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-7xcgj"] Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.755251 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-7xcgj" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.774948 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-7xcgj"] Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.817866 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/41de0d44-b33f-43c4-a6c1-54830596874b-var-run\") pod \"ovn-controller-zr2m2\" (UID: \"41de0d44-b33f-43c4-a6c1-54830596874b\") " pod="openstack/ovn-controller-zr2m2" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.817930 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/ca9ae867-4d7f-4a30-ab90-ba5113fb9029-var-lib\") pod \"ovn-controller-ovs-7xcgj\" (UID: \"ca9ae867-4d7f-4a30-ab90-ba5113fb9029\") " pod="openstack/ovn-controller-ovs-7xcgj" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.817968 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ca9ae867-4d7f-4a30-ab90-ba5113fb9029-var-run\") pod \"ovn-controller-ovs-7xcgj\" (UID: \"ca9ae867-4d7f-4a30-ab90-ba5113fb9029\") " pod="openstack/ovn-controller-ovs-7xcgj" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.817988 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41de0d44-b33f-43c4-a6c1-54830596874b-combined-ca-bundle\") pod \"ovn-controller-zr2m2\" (UID: \"41de0d44-b33f-43c4-a6c1-54830596874b\") " pod="openstack/ovn-controller-zr2m2" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.818013 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pb2l\" (UniqueName: \"kubernetes.io/projected/ca9ae867-4d7f-4a30-ab90-ba5113fb9029-kube-api-access-5pb2l\") pod \"ovn-controller-ovs-7xcgj\" (UID: \"ca9ae867-4d7f-4a30-ab90-ba5113fb9029\") " pod="openstack/ovn-controller-ovs-7xcgj" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.818035 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzpst\" (UniqueName: \"kubernetes.io/projected/41de0d44-b33f-43c4-a6c1-54830596874b-kube-api-access-wzpst\") pod \"ovn-controller-zr2m2\" (UID: \"41de0d44-b33f-43c4-a6c1-54830596874b\") " pod="openstack/ovn-controller-zr2m2" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.818053 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/41de0d44-b33f-43c4-a6c1-54830596874b-scripts\") pod \"ovn-controller-zr2m2\" (UID: \"41de0d44-b33f-43c4-a6c1-54830596874b\") " pod="openstack/ovn-controller-zr2m2" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.818078 4665 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/41de0d44-b33f-43c4-a6c1-54830596874b-var-run-ovn\") pod \"ovn-controller-zr2m2\" (UID: \"41de0d44-b33f-43c4-a6c1-54830596874b\") " pod="openstack/ovn-controller-zr2m2" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.818096 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/41de0d44-b33f-43c4-a6c1-54830596874b-ovn-controller-tls-certs\") pod \"ovn-controller-zr2m2\" (UID: \"41de0d44-b33f-43c4-a6c1-54830596874b\") " pod="openstack/ovn-controller-zr2m2" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.818113 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/ca9ae867-4d7f-4a30-ab90-ba5113fb9029-var-log\") pod \"ovn-controller-ovs-7xcgj\" (UID: \"ca9ae867-4d7f-4a30-ab90-ba5113fb9029\") " pod="openstack/ovn-controller-ovs-7xcgj" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.818132 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ca9ae867-4d7f-4a30-ab90-ba5113fb9029-scripts\") pod \"ovn-controller-ovs-7xcgj\" (UID: \"ca9ae867-4d7f-4a30-ab90-ba5113fb9029\") " pod="openstack/ovn-controller-ovs-7xcgj" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.818169 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/41de0d44-b33f-43c4-a6c1-54830596874b-var-log-ovn\") pod \"ovn-controller-zr2m2\" (UID: \"41de0d44-b33f-43c4-a6c1-54830596874b\") " pod="openstack/ovn-controller-zr2m2" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.818190 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/ca9ae867-4d7f-4a30-ab90-ba5113fb9029-etc-ovs\") pod \"ovn-controller-ovs-7xcgj\" (UID: \"ca9ae867-4d7f-4a30-ab90-ba5113fb9029\") " pod="openstack/ovn-controller-ovs-7xcgj" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.819456 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/41de0d44-b33f-43c4-a6c1-54830596874b-var-run\") pod \"ovn-controller-zr2m2\" (UID: \"41de0d44-b33f-43c4-a6c1-54830596874b\") " pod="openstack/ovn-controller-zr2m2" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.819566 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/41de0d44-b33f-43c4-a6c1-54830596874b-var-run-ovn\") pod \"ovn-controller-zr2m2\" (UID: \"41de0d44-b33f-43c4-a6c1-54830596874b\") " pod="openstack/ovn-controller-zr2m2" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.822507 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/41de0d44-b33f-43c4-a6c1-54830596874b-scripts\") pod \"ovn-controller-zr2m2\" (UID: \"41de0d44-b33f-43c4-a6c1-54830596874b\") " pod="openstack/ovn-controller-zr2m2" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.822949 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/41de0d44-b33f-43c4-a6c1-54830596874b-var-log-ovn\") pod \"ovn-controller-zr2m2\" (UID: 
\"41de0d44-b33f-43c4-a6c1-54830596874b\") " pod="openstack/ovn-controller-zr2m2" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.833817 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/41de0d44-b33f-43c4-a6c1-54830596874b-ovn-controller-tls-certs\") pod \"ovn-controller-zr2m2\" (UID: \"41de0d44-b33f-43c4-a6c1-54830596874b\") " pod="openstack/ovn-controller-zr2m2" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.836949 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzpst\" (UniqueName: \"kubernetes.io/projected/41de0d44-b33f-43c4-a6c1-54830596874b-kube-api-access-wzpst\") pod \"ovn-controller-zr2m2\" (UID: \"41de0d44-b33f-43c4-a6c1-54830596874b\") " pod="openstack/ovn-controller-zr2m2" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.837827 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/41de0d44-b33f-43c4-a6c1-54830596874b-combined-ca-bundle\") pod \"ovn-controller-zr2m2\" (UID: \"41de0d44-b33f-43c4-a6c1-54830596874b\") " pod="openstack/ovn-controller-zr2m2" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.919886 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/ca9ae867-4d7f-4a30-ab90-ba5113fb9029-etc-ovs\") pod \"ovn-controller-ovs-7xcgj\" (UID: \"ca9ae867-4d7f-4a30-ab90-ba5113fb9029\") " pod="openstack/ovn-controller-ovs-7xcgj" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.919979 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/ca9ae867-4d7f-4a30-ab90-ba5113fb9029-var-lib\") pod \"ovn-controller-ovs-7xcgj\" (UID: \"ca9ae867-4d7f-4a30-ab90-ba5113fb9029\") " pod="openstack/ovn-controller-ovs-7xcgj" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.920013 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ca9ae867-4d7f-4a30-ab90-ba5113fb9029-var-run\") pod \"ovn-controller-ovs-7xcgj\" (UID: \"ca9ae867-4d7f-4a30-ab90-ba5113fb9029\") " pod="openstack/ovn-controller-ovs-7xcgj" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.920041 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pb2l\" (UniqueName: \"kubernetes.io/projected/ca9ae867-4d7f-4a30-ab90-ba5113fb9029-kube-api-access-5pb2l\") pod \"ovn-controller-ovs-7xcgj\" (UID: \"ca9ae867-4d7f-4a30-ab90-ba5113fb9029\") " pod="openstack/ovn-controller-ovs-7xcgj" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.920105 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/ca9ae867-4d7f-4a30-ab90-ba5113fb9029-var-log\") pod \"ovn-controller-ovs-7xcgj\" (UID: \"ca9ae867-4d7f-4a30-ab90-ba5113fb9029\") " pod="openstack/ovn-controller-ovs-7xcgj" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.920137 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ca9ae867-4d7f-4a30-ab90-ba5113fb9029-scripts\") pod \"ovn-controller-ovs-7xcgj\" (UID: \"ca9ae867-4d7f-4a30-ab90-ba5113fb9029\") " pod="openstack/ovn-controller-ovs-7xcgj" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.920626 4665 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/ca9ae867-4d7f-4a30-ab90-ba5113fb9029-var-lib\") pod \"ovn-controller-ovs-7xcgj\" (UID: \"ca9ae867-4d7f-4a30-ab90-ba5113fb9029\") " pod="openstack/ovn-controller-ovs-7xcgj" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.921314 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/ca9ae867-4d7f-4a30-ab90-ba5113fb9029-etc-ovs\") pod \"ovn-controller-ovs-7xcgj\" (UID: \"ca9ae867-4d7f-4a30-ab90-ba5113fb9029\") " pod="openstack/ovn-controller-ovs-7xcgj" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.921610 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/ca9ae867-4d7f-4a30-ab90-ba5113fb9029-var-run\") pod \"ovn-controller-ovs-7xcgj\" (UID: \"ca9ae867-4d7f-4a30-ab90-ba5113fb9029\") " pod="openstack/ovn-controller-ovs-7xcgj" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.921702 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/ca9ae867-4d7f-4a30-ab90-ba5113fb9029-var-log\") pod \"ovn-controller-ovs-7xcgj\" (UID: \"ca9ae867-4d7f-4a30-ab90-ba5113fb9029\") " pod="openstack/ovn-controller-ovs-7xcgj" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.922622 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ca9ae867-4d7f-4a30-ab90-ba5113fb9029-scripts\") pod \"ovn-controller-ovs-7xcgj\" (UID: \"ca9ae867-4d7f-4a30-ab90-ba5113fb9029\") " pod="openstack/ovn-controller-ovs-7xcgj" Dec 05 01:29:58 crc kubenswrapper[4665]: I1205 01:29:58.949508 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pb2l\" (UniqueName: \"kubernetes.io/projected/ca9ae867-4d7f-4a30-ab90-ba5113fb9029-kube-api-access-5pb2l\") pod \"ovn-controller-ovs-7xcgj\" (UID: \"ca9ae867-4d7f-4a30-ab90-ba5113fb9029\") " pod="openstack/ovn-controller-ovs-7xcgj" Dec 05 01:29:59 crc kubenswrapper[4665]: I1205 01:29:59.004817 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-zr2m2" Dec 05 01:29:59 crc kubenswrapper[4665]: I1205 01:29:59.079425 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-7xcgj" Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.141435 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414970-dflj4"] Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.143072 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414970-dflj4" Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.147053 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.147548 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.148136 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414970-dflj4"] Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.247628 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f5kcd\" (UniqueName: \"kubernetes.io/projected/00725aa1-db1b-4f33-8026-ba623cf93fca-kube-api-access-f5kcd\") pod \"collect-profiles-29414970-dflj4\" (UID: \"00725aa1-db1b-4f33-8026-ba623cf93fca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414970-dflj4" Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.247713 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/00725aa1-db1b-4f33-8026-ba623cf93fca-secret-volume\") pod \"collect-profiles-29414970-dflj4\" (UID: \"00725aa1-db1b-4f33-8026-ba623cf93fca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414970-dflj4" Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.247986 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/00725aa1-db1b-4f33-8026-ba623cf93fca-config-volume\") pod \"collect-profiles-29414970-dflj4\" (UID: \"00725aa1-db1b-4f33-8026-ba623cf93fca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414970-dflj4" Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.348985 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f5kcd\" (UniqueName: \"kubernetes.io/projected/00725aa1-db1b-4f33-8026-ba623cf93fca-kube-api-access-f5kcd\") pod \"collect-profiles-29414970-dflj4\" (UID: \"00725aa1-db1b-4f33-8026-ba623cf93fca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414970-dflj4" Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.349076 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/00725aa1-db1b-4f33-8026-ba623cf93fca-secret-volume\") pod \"collect-profiles-29414970-dflj4\" (UID: \"00725aa1-db1b-4f33-8026-ba623cf93fca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414970-dflj4" Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.349126 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/00725aa1-db1b-4f33-8026-ba623cf93fca-config-volume\") pod \"collect-profiles-29414970-dflj4\" (UID: \"00725aa1-db1b-4f33-8026-ba623cf93fca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414970-dflj4" Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.350267 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/00725aa1-db1b-4f33-8026-ba623cf93fca-config-volume\") pod 
\"collect-profiles-29414970-dflj4\" (UID: \"00725aa1-db1b-4f33-8026-ba623cf93fca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414970-dflj4" Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.367687 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f5kcd\" (UniqueName: \"kubernetes.io/projected/00725aa1-db1b-4f33-8026-ba623cf93fca-kube-api-access-f5kcd\") pod \"collect-profiles-29414970-dflj4\" (UID: \"00725aa1-db1b-4f33-8026-ba623cf93fca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414970-dflj4" Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.375514 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/00725aa1-db1b-4f33-8026-ba623cf93fca-secret-volume\") pod \"collect-profiles-29414970-dflj4\" (UID: \"00725aa1-db1b-4f33-8026-ba623cf93fca\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414970-dflj4" Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.463234 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414970-dflj4" Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.908217 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.909387 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.911679 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-4j2vb" Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.911715 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.912048 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.921371 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.929523 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.970030 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68f1be10-e014-492c-9fb2-f6131ee209d4-config\") pod \"ovsdbserver-sb-0\" (UID: \"68f1be10-e014-492c-9fb2-f6131ee209d4\") " pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.970084 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/68f1be10-e014-492c-9fb2-f6131ee209d4-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"68f1be10-e014-492c-9fb2-f6131ee209d4\") " pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.970112 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68f1be10-e014-492c-9fb2-f6131ee209d4-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"68f1be10-e014-492c-9fb2-f6131ee209d4\") " pod="openstack/ovsdbserver-sb-0" Dec 05 
01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.970138 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-sb-0\" (UID: \"68f1be10-e014-492c-9fb2-f6131ee209d4\") " pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.970349 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wk8fz\" (UniqueName: \"kubernetes.io/projected/68f1be10-e014-492c-9fb2-f6131ee209d4-kube-api-access-wk8fz\") pod \"ovsdbserver-sb-0\" (UID: \"68f1be10-e014-492c-9fb2-f6131ee209d4\") " pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.970521 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/68f1be10-e014-492c-9fb2-f6131ee209d4-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"68f1be10-e014-492c-9fb2-f6131ee209d4\") " pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.970552 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/68f1be10-e014-492c-9fb2-f6131ee209d4-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"68f1be10-e014-492c-9fb2-f6131ee209d4\") " pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:00 crc kubenswrapper[4665]: I1205 01:30:00.970593 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/68f1be10-e014-492c-9fb2-f6131ee209d4-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"68f1be10-e014-492c-9fb2-f6131ee209d4\") " pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:01 crc kubenswrapper[4665]: I1205 01:30:01.071665 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/68f1be10-e014-492c-9fb2-f6131ee209d4-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"68f1be10-e014-492c-9fb2-f6131ee209d4\") " pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:01 crc kubenswrapper[4665]: I1205 01:30:01.071907 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/68f1be10-e014-492c-9fb2-f6131ee209d4-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"68f1be10-e014-492c-9fb2-f6131ee209d4\") " pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:01 crc kubenswrapper[4665]: I1205 01:30:01.072061 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/68f1be10-e014-492c-9fb2-f6131ee209d4-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"68f1be10-e014-492c-9fb2-f6131ee209d4\") " pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:01 crc kubenswrapper[4665]: I1205 01:30:01.072852 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68f1be10-e014-492c-9fb2-f6131ee209d4-config\") pod \"ovsdbserver-sb-0\" (UID: \"68f1be10-e014-492c-9fb2-f6131ee209d4\") " pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:01 crc kubenswrapper[4665]: I1205 01:30:01.072969 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/68f1be10-e014-492c-9fb2-f6131ee209d4-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"68f1be10-e014-492c-9fb2-f6131ee209d4\") " pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:01 crc kubenswrapper[4665]: I1205 01:30:01.073062 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68f1be10-e014-492c-9fb2-f6131ee209d4-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"68f1be10-e014-492c-9fb2-f6131ee209d4\") " pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:01 crc kubenswrapper[4665]: I1205 01:30:01.073168 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-sb-0\" (UID: \"68f1be10-e014-492c-9fb2-f6131ee209d4\") " pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:01 crc kubenswrapper[4665]: I1205 01:30:01.073366 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wk8fz\" (UniqueName: \"kubernetes.io/projected/68f1be10-e014-492c-9fb2-f6131ee209d4-kube-api-access-wk8fz\") pod \"ovsdbserver-sb-0\" (UID: \"68f1be10-e014-492c-9fb2-f6131ee209d4\") " pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:01 crc kubenswrapper[4665]: I1205 01:30:01.072284 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/68f1be10-e014-492c-9fb2-f6131ee209d4-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"68f1be10-e014-492c-9fb2-f6131ee209d4\") " pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:01 crc kubenswrapper[4665]: I1205 01:30:01.073063 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/68f1be10-e014-492c-9fb2-f6131ee209d4-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"68f1be10-e014-492c-9fb2-f6131ee209d4\") " pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:01 crc kubenswrapper[4665]: I1205 01:30:01.073634 4665 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-sb-0\" (UID: \"68f1be10-e014-492c-9fb2-f6131ee209d4\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:01 crc kubenswrapper[4665]: I1205 01:30:01.073770 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/68f1be10-e014-492c-9fb2-f6131ee209d4-config\") pod \"ovsdbserver-sb-0\" (UID: \"68f1be10-e014-492c-9fb2-f6131ee209d4\") " pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:01 crc kubenswrapper[4665]: I1205 01:30:01.077555 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/68f1be10-e014-492c-9fb2-f6131ee209d4-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"68f1be10-e014-492c-9fb2-f6131ee209d4\") " pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:01 crc kubenswrapper[4665]: I1205 01:30:01.078153 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/68f1be10-e014-492c-9fb2-f6131ee209d4-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"68f1be10-e014-492c-9fb2-f6131ee209d4\") " pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:01 crc kubenswrapper[4665]: I1205 01:30:01.088034 4665 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/68f1be10-e014-492c-9fb2-f6131ee209d4-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"68f1be10-e014-492c-9fb2-f6131ee209d4\") " pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:01 crc kubenswrapper[4665]: I1205 01:30:01.093783 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-sb-0\" (UID: \"68f1be10-e014-492c-9fb2-f6131ee209d4\") " pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:01 crc kubenswrapper[4665]: I1205 01:30:01.102597 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wk8fz\" (UniqueName: \"kubernetes.io/projected/68f1be10-e014-492c-9fb2-f6131ee209d4-kube-api-access-wk8fz\") pod \"ovsdbserver-sb-0\" (UID: \"68f1be10-e014-492c-9fb2-f6131ee209d4\") " pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:01 crc kubenswrapper[4665]: I1205 01:30:01.225915 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 01:30:14 crc kubenswrapper[4665]: I1205 01:30:14.922354 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:30:14 crc kubenswrapper[4665]: I1205 01:30:14.922886 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:30:16 crc kubenswrapper[4665]: E1205 01:30:16.180941 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 05 01:30:16 crc kubenswrapper[4665]: E1205 01:30:16.181084 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-p4s4v,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(753728b2-97f7-4b79-8daf-19e01260d537): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:30:16 crc kubenswrapper[4665]: E1205 01:30:16.182430 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="753728b2-97f7-4b79-8daf-19e01260d537" Dec 05 01:30:17 crc kubenswrapper[4665]: E1205 01:30:17.191889 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-server-0" podUID="753728b2-97f7-4b79-8daf-19e01260d537" Dec 05 01:30:21 crc kubenswrapper[4665]: E1205 01:30:21.947222 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Dec 05 01:30:21 crc kubenswrapper[4665]: E1205 01:30:21.947668 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash 
/var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mt5ds,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(423b314f-ea29-4705-8447-3a316edd8c6b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:30:21 crc kubenswrapper[4665]: E1205 01:30:21.948899 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="423b314f-ea29-4705-8447-3a316edd8c6b" Dec 05 01:30:21 crc kubenswrapper[4665]: E1205 01:30:21.961133 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Dec 05 01:30:21 crc kubenswrapper[4665]: E1205 01:30:21.961634 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash 
/var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fk2wf,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell1-galera-0_openstack(1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:30:21 crc kubenswrapper[4665]: E1205 01:30:21.962794 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-cell1-galera-0" podUID="1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6" Dec 05 01:30:22 crc kubenswrapper[4665]: E1205 01:30:22.223543 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-galera-0" podUID="423b314f-ea29-4705-8447-3a316edd8c6b" Dec 05 01:30:22 crc kubenswrapper[4665]: E1205 01:30:22.224085 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-cell1-galera-0" podUID="1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6" Dec 05 01:30:22 crc kubenswrapper[4665]: E1205 01:30:22.539569 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-memcached:current-podified" Dec 05 01:30:22 crc kubenswrapper[4665]: 
E1205 01:30:22.539788 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:memcached,Image:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,Command:[/usr/bin/dumb-init -- /usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:n5d7h5f7h6bhcch554h584h5bdh599h96hd7h74h557h5b5h96h57bh55h59bh694h59h665hf5h5cdh4h68h59fh5bdh5bfh5dfh599h666hd4h5b6q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/certs/memcached.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/private/memcached.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-27xvr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_openstack(02847117-544d-400b-b9a0-4d10200e0f0d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:30:22 crc kubenswrapper[4665]: E1205 01:30:22.541174 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc 
error: code = Canceled desc = copying config: context canceled\"" pod="openstack/memcached-0" podUID="02847117-544d-400b-b9a0-4d10200e0f0d" Dec 05 01:30:22 crc kubenswrapper[4665]: E1205 01:30:22.560249 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 05 01:30:22 crc kubenswrapper[4665]: E1205 01:30:22.560721 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-f5j4s,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(82ad13d8-7710-4135-9822-a96d62650e6d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:30:22 crc kubenswrapper[4665]: E1205 01:30:22.562136 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code 
= Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="82ad13d8-7710-4135-9822-a96d62650e6d" Dec 05 01:30:23 crc kubenswrapper[4665]: E1205 01:30:23.227958 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-memcached:current-podified\\\"\"" pod="openstack/memcached-0" podUID="02847117-544d-400b-b9a0-4d10200e0f0d" Dec 05 01:30:23 crc kubenswrapper[4665]: E1205 01:30:23.228289 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="82ad13d8-7710-4135-9822-a96d62650e6d" Dec 05 01:30:23 crc kubenswrapper[4665]: E1205 01:30:23.361610 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 01:30:23 crc kubenswrapper[4665]: E1205 01:30:23.361780 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-86hxk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-w5trh_openstack(d1516ca0-74c0-4826-8729-11834caf6031): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:30:23 crc kubenswrapper[4665]: E1205 01:30:23.363046 4665 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-w5trh" podUID="d1516ca0-74c0-4826-8729-11834caf6031" Dec 05 01:30:23 crc kubenswrapper[4665]: E1205 01:30:23.383464 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 01:30:23 crc kubenswrapper[4665]: E1205 01:30:23.383628 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-brw2p,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-5pbm6_openstack(b76b2027-5c2b-478f-9481-fc5c07a7bcf7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:30:23 crc kubenswrapper[4665]: E1205 01:30:23.385048 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-5pbm6" podUID="b76b2027-5c2b-478f-9481-fc5c07a7bcf7" Dec 05 01:30:23 crc kubenswrapper[4665]: E1205 01:30:23.388638 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 
05 01:30:23 crc kubenswrapper[4665]: E1205 01:30:23.388799 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mtvz6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-25nch_openstack(a24822e3-8f2d-4220-beab-78f1f456b74d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:30:23 crc kubenswrapper[4665]: E1205 01:30:23.390313 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-25nch" podUID="a24822e3-8f2d-4220-beab-78f1f456b74d" Dec 05 01:30:23 crc kubenswrapper[4665]: E1205 01:30:23.395899 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 01:30:23 crc kubenswrapper[4665]: E1205 01:30:23.396181 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zh2gj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-c2b8c_openstack(23bb6b3a-40c6-448f-81ee-de6be2de6d5f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:30:23 crc kubenswrapper[4665]: E1205 01:30:23.397435 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-c2b8c" podUID="23bb6b3a-40c6-448f-81ee-de6be2de6d5f" Dec 05 01:30:23 crc kubenswrapper[4665]: I1205 01:30:23.893751 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414970-dflj4"] Dec 05 01:30:23 crc kubenswrapper[4665]: E1205 01:30:23.974716 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:16797bc33772d7e098ba8b49ca7caf9bc2850095ce37074dc4fb8d938c0ea8d5: Get \"https://registry.k8s.io/v2/kube-state-metrics/kube-state-metrics/blobs/sha256:16797bc33772d7e098ba8b49ca7caf9bc2850095ce37074dc4fb8d938c0ea8d5\": context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0" Dec 05 01:30:23 crc kubenswrapper[4665]: E1205 01:30:23.974846 4665 kuberuntime_image.go:55] "Failed to pull image" err="rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:16797bc33772d7e098ba8b49ca7caf9bc2850095ce37074dc4fb8d938c0ea8d5: Get \"https://registry.k8s.io/v2/kube-state-metrics/kube-state-metrics/blobs/sha256:16797bc33772d7e098ba8b49ca7caf9bc2850095ce37074dc4fb8d938c0ea8d5\": context canceled" image="registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0" Dec 05 01:30:23 crc kubenswrapper[4665]: E1205 01:30:23.977061 4665 
kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-state-metrics,Image:registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0,Command:[],Args:[--resources=pods --namespaces=openstack],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:http-metrics,HostPort:0,ContainerPort:8080,Protocol:TCP,HostIP:,},ContainerPort{Name:telemetry,HostPort:0,ContainerPort:8081,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-qmqjz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/livez,Port:{0 8080 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:*true,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod kube-state-metrics-0_openstack(d40b331f-9046-43a5-92e9-89ac2b567043): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:16797bc33772d7e098ba8b49ca7caf9bc2850095ce37074dc4fb8d938c0ea8d5: Get \"https://registry.k8s.io/v2/kube-state-metrics/kube-state-metrics/blobs/sha256:16797bc33772d7e098ba8b49ca7caf9bc2850095ce37074dc4fb8d938c0ea8d5\": context canceled" logger="UnhandledError" Dec 05 01:30:23 crc kubenswrapper[4665]: E1205 01:30:23.980537 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: reading blob sha256:16797bc33772d7e098ba8b49ca7caf9bc2850095ce37074dc4fb8d938c0ea8d5: Get \\\"https://registry.k8s.io/v2/kube-state-metrics/kube-state-metrics/blobs/sha256:16797bc33772d7e098ba8b49ca7caf9bc2850095ce37074dc4fb8d938c0ea8d5\\\": context canceled\"" pod="openstack/kube-state-metrics-0" podUID="d40b331f-9046-43a5-92e9-89ac2b567043" Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.028886 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-zr2m2"] Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.124408 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.211264 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/ovn-controller-ovs-7xcgj"] Dec 05 01:30:24 crc kubenswrapper[4665]: W1205 01:30:24.220911 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podca9ae867_4d7f_4a30_ab90_ba5113fb9029.slice/crio-195714d0b91ecd818bc92ca9c0a70c5281c6eef9a0f67b544444844710410099 WatchSource:0}: Error finding container 195714d0b91ecd818bc92ca9c0a70c5281c6eef9a0f67b544444844710410099: Status 404 returned error can't find the container with id 195714d0b91ecd818bc92ca9c0a70c5281c6eef9a0f67b544444844710410099 Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.233488 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"df5300cc-4ce3-4574-a775-595607aeddb6","Type":"ContainerStarted","Data":"79239a921ead950a007391cbef4bead5a612ad82f13e4a671cf31517c7fe3660"} Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.234609 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-7xcgj" event={"ID":"ca9ae867-4d7f-4a30-ab90-ba5113fb9029","Type":"ContainerStarted","Data":"195714d0b91ecd818bc92ca9c0a70c5281c6eef9a0f67b544444844710410099"} Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.235832 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414970-dflj4" event={"ID":"00725aa1-db1b-4f33-8026-ba623cf93fca","Type":"ContainerStarted","Data":"7f31dca1d516a7c511aca131b33be79d264115c9d8e1cae74cc1c63cf898bdad"} Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.235877 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414970-dflj4" event={"ID":"00725aa1-db1b-4f33-8026-ba623cf93fca","Type":"ContainerStarted","Data":"6731b1ac925a69de575c3b7cddeaeda3d6101d992a9e4cb5d030de44413a6427"} Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.239134 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-zr2m2" event={"ID":"41de0d44-b33f-43c4-a6c1-54830596874b","Type":"ContainerStarted","Data":"3529a48737aa1eebc90aa24b7093ed4b552016c7866a57e61afb1987bd2fc67d"} Dec 05 01:30:24 crc kubenswrapper[4665]: E1205 01:30:24.240782 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-5pbm6" podUID="b76b2027-5c2b-478f-9481-fc5c07a7bcf7" Dec 05 01:30:24 crc kubenswrapper[4665]: E1205 01:30:24.240884 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-c2b8c" podUID="23bb6b3a-40c6-448f-81ee-de6be2de6d5f" Dec 05 01:30:24 crc kubenswrapper[4665]: E1205 01:30:24.240916 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-state-metrics\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.k8s.io/kube-state-metrics/kube-state-metrics:v2.15.0\\\"\"" pod="openstack/kube-state-metrics-0" podUID="d40b331f-9046-43a5-92e9-89ac2b567043" Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.254742 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-operator-lifecycle-manager/collect-profiles-29414970-dflj4" podStartSLOduration=24.254724773 podStartE2EDuration="24.254724773s" podCreationTimestamp="2025-12-05 01:30:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:30:24.252599622 +0000 UTC m=+1199.591991911" watchObservedRunningTime="2025-12-05 01:30:24.254724773 +0000 UTC m=+1199.594117072" Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.523903 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-25nch" Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.648705 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-w5trh" Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.682201 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a24822e3-8f2d-4220-beab-78f1f456b74d-config\") pod \"a24822e3-8f2d-4220-beab-78f1f456b74d\" (UID: \"a24822e3-8f2d-4220-beab-78f1f456b74d\") " Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.682257 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mtvz6\" (UniqueName: \"kubernetes.io/projected/a24822e3-8f2d-4220-beab-78f1f456b74d-kube-api-access-mtvz6\") pod \"a24822e3-8f2d-4220-beab-78f1f456b74d\" (UID: \"a24822e3-8f2d-4220-beab-78f1f456b74d\") " Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.682394 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a24822e3-8f2d-4220-beab-78f1f456b74d-dns-svc\") pod \"a24822e3-8f2d-4220-beab-78f1f456b74d\" (UID: \"a24822e3-8f2d-4220-beab-78f1f456b74d\") " Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.682714 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a24822e3-8f2d-4220-beab-78f1f456b74d-config" (OuterVolumeSpecName: "config") pod "a24822e3-8f2d-4220-beab-78f1f456b74d" (UID: "a24822e3-8f2d-4220-beab-78f1f456b74d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.683367 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a24822e3-8f2d-4220-beab-78f1f456b74d-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a24822e3-8f2d-4220-beab-78f1f456b74d" (UID: "a24822e3-8f2d-4220-beab-78f1f456b74d"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.695498 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a24822e3-8f2d-4220-beab-78f1f456b74d-kube-api-access-mtvz6" (OuterVolumeSpecName: "kube-api-access-mtvz6") pod "a24822e3-8f2d-4220-beab-78f1f456b74d" (UID: "a24822e3-8f2d-4220-beab-78f1f456b74d"). InnerVolumeSpecName "kube-api-access-mtvz6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.783225 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1516ca0-74c0-4826-8729-11834caf6031-config\") pod \"d1516ca0-74c0-4826-8729-11834caf6031\" (UID: \"d1516ca0-74c0-4826-8729-11834caf6031\") " Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.783385 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-86hxk\" (UniqueName: \"kubernetes.io/projected/d1516ca0-74c0-4826-8729-11834caf6031-kube-api-access-86hxk\") pod \"d1516ca0-74c0-4826-8729-11834caf6031\" (UID: \"d1516ca0-74c0-4826-8729-11834caf6031\") " Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.784009 4665 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a24822e3-8f2d-4220-beab-78f1f456b74d-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.784036 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a24822e3-8f2d-4220-beab-78f1f456b74d-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.784047 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mtvz6\" (UniqueName: \"kubernetes.io/projected/a24822e3-8f2d-4220-beab-78f1f456b74d-kube-api-access-mtvz6\") on node \"crc\" DevicePath \"\"" Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.784124 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d1516ca0-74c0-4826-8729-11834caf6031-config" (OuterVolumeSpecName: "config") pod "d1516ca0-74c0-4826-8729-11834caf6031" (UID: "d1516ca0-74c0-4826-8729-11834caf6031"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.786711 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d1516ca0-74c0-4826-8729-11834caf6031-kube-api-access-86hxk" (OuterVolumeSpecName: "kube-api-access-86hxk") pod "d1516ca0-74c0-4826-8729-11834caf6031" (UID: "d1516ca0-74c0-4826-8729-11834caf6031"). InnerVolumeSpecName "kube-api-access-86hxk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.893552 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d1516ca0-74c0-4826-8729-11834caf6031-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:30:24 crc kubenswrapper[4665]: I1205 01:30:24.893735 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-86hxk\" (UniqueName: \"kubernetes.io/projected/d1516ca0-74c0-4826-8729-11834caf6031-kube-api-access-86hxk\") on node \"crc\" DevicePath \"\"" Dec 05 01:30:25 crc kubenswrapper[4665]: I1205 01:30:25.041288 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 01:30:25 crc kubenswrapper[4665]: I1205 01:30:25.247172 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"68f1be10-e014-492c-9fb2-f6131ee209d4","Type":"ContainerStarted","Data":"a62c37a21bcbf9986dc3e817a0f31b25acb06d238d6ce147fd75669072ff5759"} Dec 05 01:30:25 crc kubenswrapper[4665]: I1205 01:30:25.248875 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-w5trh" Dec 05 01:30:25 crc kubenswrapper[4665]: I1205 01:30:25.248889 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-w5trh" event={"ID":"d1516ca0-74c0-4826-8729-11834caf6031","Type":"ContainerDied","Data":"01e2fcf37a94487b54446b877bcc55767fee6b5d451bc67f34b433577ed28bd0"} Dec 05 01:30:25 crc kubenswrapper[4665]: I1205 01:30:25.252849 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-25nch" event={"ID":"a24822e3-8f2d-4220-beab-78f1f456b74d","Type":"ContainerDied","Data":"c88b3a4cc0de4759668d9df0b278f24c36a6bdaec8bd244f234e8325103e9243"} Dec 05 01:30:25 crc kubenswrapper[4665]: I1205 01:30:25.252913 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-25nch" Dec 05 01:30:25 crc kubenswrapper[4665]: I1205 01:30:25.262107 4665 generic.go:334] "Generic (PLEG): container finished" podID="00725aa1-db1b-4f33-8026-ba623cf93fca" containerID="7f31dca1d516a7c511aca131b33be79d264115c9d8e1cae74cc1c63cf898bdad" exitCode=0 Dec 05 01:30:25 crc kubenswrapper[4665]: I1205 01:30:25.262148 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414970-dflj4" event={"ID":"00725aa1-db1b-4f33-8026-ba623cf93fca","Type":"ContainerDied","Data":"7f31dca1d516a7c511aca131b33be79d264115c9d8e1cae74cc1c63cf898bdad"} Dec 05 01:30:25 crc kubenswrapper[4665]: I1205 01:30:25.310117 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-w5trh"] Dec 05 01:30:25 crc kubenswrapper[4665]: I1205 01:30:25.315734 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-w5trh"] Dec 05 01:30:25 crc kubenswrapper[4665]: I1205 01:30:25.374009 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-25nch"] Dec 05 01:30:25 crc kubenswrapper[4665]: I1205 01:30:25.379789 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-25nch"] Dec 05 01:30:26 crc kubenswrapper[4665]: I1205 01:30:26.530553 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414970-dflj4" Dec 05 01:30:26 crc kubenswrapper[4665]: I1205 01:30:26.623264 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/00725aa1-db1b-4f33-8026-ba623cf93fca-config-volume\") pod \"00725aa1-db1b-4f33-8026-ba623cf93fca\" (UID: \"00725aa1-db1b-4f33-8026-ba623cf93fca\") " Dec 05 01:30:26 crc kubenswrapper[4665]: I1205 01:30:26.623382 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5kcd\" (UniqueName: \"kubernetes.io/projected/00725aa1-db1b-4f33-8026-ba623cf93fca-kube-api-access-f5kcd\") pod \"00725aa1-db1b-4f33-8026-ba623cf93fca\" (UID: \"00725aa1-db1b-4f33-8026-ba623cf93fca\") " Dec 05 01:30:26 crc kubenswrapper[4665]: I1205 01:30:26.624625 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/00725aa1-db1b-4f33-8026-ba623cf93fca-secret-volume\") pod \"00725aa1-db1b-4f33-8026-ba623cf93fca\" (UID: \"00725aa1-db1b-4f33-8026-ba623cf93fca\") " Dec 05 01:30:26 crc kubenswrapper[4665]: I1205 01:30:26.624726 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/00725aa1-db1b-4f33-8026-ba623cf93fca-config-volume" (OuterVolumeSpecName: "config-volume") pod "00725aa1-db1b-4f33-8026-ba623cf93fca" (UID: "00725aa1-db1b-4f33-8026-ba623cf93fca"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:30:26 crc kubenswrapper[4665]: I1205 01:30:26.625388 4665 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/00725aa1-db1b-4f33-8026-ba623cf93fca-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 01:30:26 crc kubenswrapper[4665]: I1205 01:30:26.631647 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/00725aa1-db1b-4f33-8026-ba623cf93fca-kube-api-access-f5kcd" (OuterVolumeSpecName: "kube-api-access-f5kcd") pod "00725aa1-db1b-4f33-8026-ba623cf93fca" (UID: "00725aa1-db1b-4f33-8026-ba623cf93fca"). InnerVolumeSpecName "kube-api-access-f5kcd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:30:26 crc kubenswrapper[4665]: I1205 01:30:26.631875 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/00725aa1-db1b-4f33-8026-ba623cf93fca-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "00725aa1-db1b-4f33-8026-ba623cf93fca" (UID: "00725aa1-db1b-4f33-8026-ba623cf93fca"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:30:26 crc kubenswrapper[4665]: I1205 01:30:26.726340 4665 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/00725aa1-db1b-4f33-8026-ba623cf93fca-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 01:30:26 crc kubenswrapper[4665]: I1205 01:30:26.726370 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5kcd\" (UniqueName: \"kubernetes.io/projected/00725aa1-db1b-4f33-8026-ba623cf93fca-kube-api-access-f5kcd\") on node \"crc\" DevicePath \"\"" Dec 05 01:30:26 crc kubenswrapper[4665]: I1205 01:30:26.910129 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a24822e3-8f2d-4220-beab-78f1f456b74d" path="/var/lib/kubelet/pods/a24822e3-8f2d-4220-beab-78f1f456b74d/volumes" Dec 05 01:30:26 crc kubenswrapper[4665]: I1205 01:30:26.910722 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d1516ca0-74c0-4826-8729-11834caf6031" path="/var/lib/kubelet/pods/d1516ca0-74c0-4826-8729-11834caf6031/volumes" Dec 05 01:30:27 crc kubenswrapper[4665]: I1205 01:30:27.276366 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414970-dflj4" event={"ID":"00725aa1-db1b-4f33-8026-ba623cf93fca","Type":"ContainerDied","Data":"6731b1ac925a69de575c3b7cddeaeda3d6101d992a9e4cb5d030de44413a6427"} Dec 05 01:30:27 crc kubenswrapper[4665]: I1205 01:30:27.276411 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6731b1ac925a69de575c3b7cddeaeda3d6101d992a9e4cb5d030de44413a6427" Dec 05 01:30:27 crc kubenswrapper[4665]: I1205 01:30:27.276476 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414970-dflj4" Dec 05 01:30:30 crc kubenswrapper[4665]: I1205 01:30:30.307252 4665 generic.go:334] "Generic (PLEG): container finished" podID="ca9ae867-4d7f-4a30-ab90-ba5113fb9029" containerID="a71af00b4bc6432fb8322846c591c29d6df1fabab162c9622e4d7cbad3b63465" exitCode=0 Dec 05 01:30:30 crc kubenswrapper[4665]: I1205 01:30:30.316217 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-7xcgj" event={"ID":"ca9ae867-4d7f-4a30-ab90-ba5113fb9029","Type":"ContainerDied","Data":"a71af00b4bc6432fb8322846c591c29d6df1fabab162c9622e4d7cbad3b63465"} Dec 05 01:30:30 crc kubenswrapper[4665]: I1205 01:30:30.343200 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"68f1be10-e014-492c-9fb2-f6131ee209d4","Type":"ContainerStarted","Data":"b765743eeba9079b2c44b7491641dc24492b5f2475e59b0b66369e2026252b04"} Dec 05 01:30:30 crc kubenswrapper[4665]: I1205 01:30:30.346850 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-zr2m2" event={"ID":"41de0d44-b33f-43c4-a6c1-54830596874b","Type":"ContainerStarted","Data":"c34aa92afdabe01e56297a01d4a03bfd7ce7eeab06d29929dd9438fd8cb699f0"} Dec 05 01:30:30 crc kubenswrapper[4665]: I1205 01:30:30.348499 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-zr2m2" Dec 05 01:30:30 crc kubenswrapper[4665]: I1205 01:30:30.350023 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"df5300cc-4ce3-4574-a775-595607aeddb6","Type":"ContainerStarted","Data":"63e412a9d742dcccbdd0e9d84bbf322f7c0cde2803f1a6f81e65b529af79f25d"} Dec 05 01:30:30 crc 
kubenswrapper[4665]: I1205 01:30:30.375101 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-zr2m2" podStartSLOduration=26.997540016 podStartE2EDuration="32.375084002s" podCreationTimestamp="2025-12-05 01:29:58 +0000 UTC" firstStartedPulling="2025-12-05 01:30:24.036827276 +0000 UTC m=+1199.376219575" lastFinishedPulling="2025-12-05 01:30:29.414371262 +0000 UTC m=+1204.753763561" observedRunningTime="2025-12-05 01:30:30.368756791 +0000 UTC m=+1205.708149090" watchObservedRunningTime="2025-12-05 01:30:30.375084002 +0000 UTC m=+1205.714476301"
Dec 05 01:30:30 crc kubenswrapper[4665]: I1205 01:30:30.898519 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-ts2h8"]
Dec 05 01:30:30 crc kubenswrapper[4665]: E1205 01:30:30.898942 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="00725aa1-db1b-4f33-8026-ba623cf93fca" containerName="collect-profiles"
Dec 05 01:30:30 crc kubenswrapper[4665]: I1205 01:30:30.898960 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="00725aa1-db1b-4f33-8026-ba623cf93fca" containerName="collect-profiles"
Dec 05 01:30:30 crc kubenswrapper[4665]: I1205 01:30:30.899144 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="00725aa1-db1b-4f33-8026-ba623cf93fca" containerName="collect-profiles"
Dec 05 01:30:30 crc kubenswrapper[4665]: I1205 01:30:30.906443 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-ts2h8"
Dec 05 01:30:30 crc kubenswrapper[4665]: I1205 01:30:30.912608 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config"
Dec 05 01:30:30 crc kubenswrapper[4665]: I1205 01:30:30.925630 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-ts2h8"]
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.019428 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e65a93ed-16e2-4cf7-a295-a3517e553335-config\") pod \"ovn-controller-metrics-ts2h8\" (UID: \"e65a93ed-16e2-4cf7-a295-a3517e553335\") " pod="openstack/ovn-controller-metrics-ts2h8"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.019545 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/e65a93ed-16e2-4cf7-a295-a3517e553335-ovn-rundir\") pod \"ovn-controller-metrics-ts2h8\" (UID: \"e65a93ed-16e2-4cf7-a295-a3517e553335\") " pod="openstack/ovn-controller-metrics-ts2h8"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.019571 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t48mp\" (UniqueName: \"kubernetes.io/projected/e65a93ed-16e2-4cf7-a295-a3517e553335-kube-api-access-t48mp\") pod \"ovn-controller-metrics-ts2h8\" (UID: \"e65a93ed-16e2-4cf7-a295-a3517e553335\") " pod="openstack/ovn-controller-metrics-ts2h8"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.019634 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e65a93ed-16e2-4cf7-a295-a3517e553335-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-ts2h8\" (UID: \"e65a93ed-16e2-4cf7-a295-a3517e553335\") " pod="openstack/ovn-controller-metrics-ts2h8"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.019671 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e65a93ed-16e2-4cf7-a295-a3517e553335-combined-ca-bundle\") pod \"ovn-controller-metrics-ts2h8\" (UID: \"e65a93ed-16e2-4cf7-a295-a3517e553335\") " pod="openstack/ovn-controller-metrics-ts2h8"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.019697 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/e65a93ed-16e2-4cf7-a295-a3517e553335-ovs-rundir\") pod \"ovn-controller-metrics-ts2h8\" (UID: \"e65a93ed-16e2-4cf7-a295-a3517e553335\") " pod="openstack/ovn-controller-metrics-ts2h8"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.120995 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e65a93ed-16e2-4cf7-a295-a3517e553335-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-ts2h8\" (UID: \"e65a93ed-16e2-4cf7-a295-a3517e553335\") " pod="openstack/ovn-controller-metrics-ts2h8"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.121339 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e65a93ed-16e2-4cf7-a295-a3517e553335-combined-ca-bundle\") pod \"ovn-controller-metrics-ts2h8\" (UID: \"e65a93ed-16e2-4cf7-a295-a3517e553335\") " pod="openstack/ovn-controller-metrics-ts2h8"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.121372 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/e65a93ed-16e2-4cf7-a295-a3517e553335-ovs-rundir\") pod \"ovn-controller-metrics-ts2h8\" (UID: \"e65a93ed-16e2-4cf7-a295-a3517e553335\") " pod="openstack/ovn-controller-metrics-ts2h8"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.121430 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e65a93ed-16e2-4cf7-a295-a3517e553335-config\") pod \"ovn-controller-metrics-ts2h8\" (UID: \"e65a93ed-16e2-4cf7-a295-a3517e553335\") " pod="openstack/ovn-controller-metrics-ts2h8"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.121489 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/e65a93ed-16e2-4cf7-a295-a3517e553335-ovn-rundir\") pod \"ovn-controller-metrics-ts2h8\" (UID: \"e65a93ed-16e2-4cf7-a295-a3517e553335\") " pod="openstack/ovn-controller-metrics-ts2h8"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.121507 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t48mp\" (UniqueName: \"kubernetes.io/projected/e65a93ed-16e2-4cf7-a295-a3517e553335-kube-api-access-t48mp\") pod \"ovn-controller-metrics-ts2h8\" (UID: \"e65a93ed-16e2-4cf7-a295-a3517e553335\") " pod="openstack/ovn-controller-metrics-ts2h8"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.122552 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/e65a93ed-16e2-4cf7-a295-a3517e553335-ovs-rundir\") pod \"ovn-controller-metrics-ts2h8\" (UID: \"e65a93ed-16e2-4cf7-a295-a3517e553335\") " pod="openstack/ovn-controller-metrics-ts2h8"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.123231 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e65a93ed-16e2-4cf7-a295-a3517e553335-config\") pod \"ovn-controller-metrics-ts2h8\" (UID: \"e65a93ed-16e2-4cf7-a295-a3517e553335\") " pod="openstack/ovn-controller-metrics-ts2h8"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.123281 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/e65a93ed-16e2-4cf7-a295-a3517e553335-ovn-rundir\") pod \"ovn-controller-metrics-ts2h8\" (UID: \"e65a93ed-16e2-4cf7-a295-a3517e553335\") " pod="openstack/ovn-controller-metrics-ts2h8"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.129864 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/e65a93ed-16e2-4cf7-a295-a3517e553335-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-ts2h8\" (UID: \"e65a93ed-16e2-4cf7-a295-a3517e553335\") " pod="openstack/ovn-controller-metrics-ts2h8"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.150861 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e65a93ed-16e2-4cf7-a295-a3517e553335-combined-ca-bundle\") pod \"ovn-controller-metrics-ts2h8\" (UID: \"e65a93ed-16e2-4cf7-a295-a3517e553335\") " pod="openstack/ovn-controller-metrics-ts2h8"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.153136 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t48mp\" (UniqueName: \"kubernetes.io/projected/e65a93ed-16e2-4cf7-a295-a3517e553335-kube-api-access-t48mp\") pod \"ovn-controller-metrics-ts2h8\" (UID: \"e65a93ed-16e2-4cf7-a295-a3517e553335\") " pod="openstack/ovn-controller-metrics-ts2h8"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.241261 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-ts2h8"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.379698 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-c2b8c"]
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.389984 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-7xcgj" event={"ID":"ca9ae867-4d7f-4a30-ab90-ba5113fb9029","Type":"ContainerStarted","Data":"dd3a8c47743e46514aae18966cfbbe40e99bff6ca4b1126eb32d23dc49393152"}
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.390034 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-7xcgj" event={"ID":"ca9ae867-4d7f-4a30-ab90-ba5113fb9029","Type":"ContainerStarted","Data":"be75153de677cfa04febb11bc75169553de6c6ffde050d2e4a70636181602600"}
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.390411 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-7xcgj"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.390514 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-7xcgj"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.408749 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"753728b2-97f7-4b79-8daf-19e01260d537","Type":"ContainerStarted","Data":"ad6c34d45f2e5957d56a68ee1781bf4ac2c85caaf4bff585d01373807dfb5bed"}
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.537768 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-8frtw"]
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.547550 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-8frtw"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.576200 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.633176 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dq9pz\" (UniqueName: \"kubernetes.io/projected/09e5d8e3-0bdb-41d1-8696-a855cc5b9af5-kube-api-access-dq9pz\") pod \"dnsmasq-dns-7fd796d7df-8frtw\" (UID: \"09e5d8e3-0bdb-41d1-8696-a855cc5b9af5\") " pod="openstack/dnsmasq-dns-7fd796d7df-8frtw"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.633245 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09e5d8e3-0bdb-41d1-8696-a855cc5b9af5-config\") pod \"dnsmasq-dns-7fd796d7df-8frtw\" (UID: \"09e5d8e3-0bdb-41d1-8696-a855cc5b9af5\") " pod="openstack/dnsmasq-dns-7fd796d7df-8frtw"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.633441 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/09e5d8e3-0bdb-41d1-8696-a855cc5b9af5-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-8frtw\" (UID: \"09e5d8e3-0bdb-41d1-8696-a855cc5b9af5\") " pod="openstack/dnsmasq-dns-7fd796d7df-8frtw"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.633537 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/09e5d8e3-0bdb-41d1-8696-a855cc5b9af5-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-8frtw\" (UID: \"09e5d8e3-0bdb-41d1-8696-a855cc5b9af5\") " pod="openstack/dnsmasq-dns-7fd796d7df-8frtw"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.636804 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-8frtw"]
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.676620 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-7xcgj" podStartSLOduration=28.492149096 podStartE2EDuration="33.676600535s" podCreationTimestamp="2025-12-05 01:29:58 +0000 UTC" firstStartedPulling="2025-12-05 01:30:24.222927526 +0000 UTC m=+1199.562319825" lastFinishedPulling="2025-12-05 01:30:29.407378965 +0000 UTC m=+1204.746771264" observedRunningTime="2025-12-05 01:30:31.659794355 +0000 UTC m=+1206.999186664" watchObservedRunningTime="2025-12-05 01:30:31.676600535 +0000 UTC m=+1207.015992834"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.735130 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09e5d8e3-0bdb-41d1-8696-a855cc5b9af5-config\") pod \"dnsmasq-dns-7fd796d7df-8frtw\" (UID: \"09e5d8e3-0bdb-41d1-8696-a855cc5b9af5\") " pod="openstack/dnsmasq-dns-7fd796d7df-8frtw"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.735232 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/09e5d8e3-0bdb-41d1-8696-a855cc5b9af5-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-8frtw\" (UID: \"09e5d8e3-0bdb-41d1-8696-a855cc5b9af5\") " pod="openstack/dnsmasq-dns-7fd796d7df-8frtw"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.735263 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/09e5d8e3-0bdb-41d1-8696-a855cc5b9af5-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-8frtw\" (UID: \"09e5d8e3-0bdb-41d1-8696-a855cc5b9af5\") " pod="openstack/dnsmasq-dns-7fd796d7df-8frtw"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.735290 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dq9pz\" (UniqueName: \"kubernetes.io/projected/09e5d8e3-0bdb-41d1-8696-a855cc5b9af5-kube-api-access-dq9pz\") pod \"dnsmasq-dns-7fd796d7df-8frtw\" (UID: \"09e5d8e3-0bdb-41d1-8696-a855cc5b9af5\") " pod="openstack/dnsmasq-dns-7fd796d7df-8frtw"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.736264 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09e5d8e3-0bdb-41d1-8696-a855cc5b9af5-config\") pod \"dnsmasq-dns-7fd796d7df-8frtw\" (UID: \"09e5d8e3-0bdb-41d1-8696-a855cc5b9af5\") " pod="openstack/dnsmasq-dns-7fd796d7df-8frtw"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.736592 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/09e5d8e3-0bdb-41d1-8696-a855cc5b9af5-ovsdbserver-nb\") pod \"dnsmasq-dns-7fd796d7df-8frtw\" (UID: \"09e5d8e3-0bdb-41d1-8696-a855cc5b9af5\") " pod="openstack/dnsmasq-dns-7fd796d7df-8frtw"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.736735 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/09e5d8e3-0bdb-41d1-8696-a855cc5b9af5-dns-svc\") pod \"dnsmasq-dns-7fd796d7df-8frtw\" (UID: \"09e5d8e3-0bdb-41d1-8696-a855cc5b9af5\") " pod="openstack/dnsmasq-dns-7fd796d7df-8frtw"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.778261 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dq9pz\" (UniqueName: \"kubernetes.io/projected/09e5d8e3-0bdb-41d1-8696-a855cc5b9af5-kube-api-access-dq9pz\") pod \"dnsmasq-dns-7fd796d7df-8frtw\" (UID: \"09e5d8e3-0bdb-41d1-8696-a855cc5b9af5\") " pod="openstack/dnsmasq-dns-7fd796d7df-8frtw"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.881373 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-8frtw"
Dec 05 01:30:31 crc kubenswrapper[4665]: I1205 01:30:31.946536 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-5pbm6"]
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.030518 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-wkgb5"]
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.032601 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5"
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.057425 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb"
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.063456 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-wkgb5"]
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.071391 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8114fb6-6378-4fca-969d-c611a6c5f330-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-wkgb5\" (UID: \"e8114fb6-6378-4fca-969d-c611a6c5f330\") " pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5"
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.071461 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e8114fb6-6378-4fca-969d-c611a6c5f330-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-wkgb5\" (UID: \"e8114fb6-6378-4fca-969d-c611a6c5f330\") " pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5"
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.071485 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e8114fb6-6378-4fca-969d-c611a6c5f330-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-wkgb5\" (UID: \"e8114fb6-6378-4fca-969d-c611a6c5f330\") " pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5"
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.071505 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4759l\" (UniqueName: \"kubernetes.io/projected/e8114fb6-6378-4fca-969d-c611a6c5f330-kube-api-access-4759l\") pod \"dnsmasq-dns-86db49b7ff-wkgb5\" (UID: \"e8114fb6-6378-4fca-969d-c611a6c5f330\") " pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5"
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.071954 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8114fb6-6378-4fca-969d-c611a6c5f330-config\") pod \"dnsmasq-dns-86db49b7ff-wkgb5\" (UID: \"e8114fb6-6378-4fca-969d-c611a6c5f330\") " pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5"
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.174642 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8114fb6-6378-4fca-969d-c611a6c5f330-config\") pod \"dnsmasq-dns-86db49b7ff-wkgb5\" (UID: \"e8114fb6-6378-4fca-969d-c611a6c5f330\") " pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5"
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.174757 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8114fb6-6378-4fca-969d-c611a6c5f330-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-wkgb5\" (UID: \"e8114fb6-6378-4fca-969d-c611a6c5f330\") " pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5"
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.174835 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e8114fb6-6378-4fca-969d-c611a6c5f330-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-wkgb5\" (UID: \"e8114fb6-6378-4fca-969d-c611a6c5f330\") " pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5"
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.174858 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e8114fb6-6378-4fca-969d-c611a6c5f330-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-wkgb5\" (UID: \"e8114fb6-6378-4fca-969d-c611a6c5f330\") " pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5"
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.174882 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4759l\" (UniqueName: \"kubernetes.io/projected/e8114fb6-6378-4fca-969d-c611a6c5f330-kube-api-access-4759l\") pod \"dnsmasq-dns-86db49b7ff-wkgb5\" (UID: \"e8114fb6-6378-4fca-969d-c611a6c5f330\") " pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5"
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.176204 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8114fb6-6378-4fca-969d-c611a6c5f330-config\") pod \"dnsmasq-dns-86db49b7ff-wkgb5\" (UID: \"e8114fb6-6378-4fca-969d-c611a6c5f330\") " pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5"
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.176832 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8114fb6-6378-4fca-969d-c611a6c5f330-dns-svc\") pod \"dnsmasq-dns-86db49b7ff-wkgb5\" (UID: \"e8114fb6-6378-4fca-969d-c611a6c5f330\") " pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5"
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.177472 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e8114fb6-6378-4fca-969d-c611a6c5f330-ovsdbserver-sb\") pod \"dnsmasq-dns-86db49b7ff-wkgb5\" (UID: \"e8114fb6-6378-4fca-969d-c611a6c5f330\") " pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5"
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.178118 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e8114fb6-6378-4fca-969d-c611a6c5f330-ovsdbserver-nb\") pod \"dnsmasq-dns-86db49b7ff-wkgb5\" (UID: \"e8114fb6-6378-4fca-969d-c611a6c5f330\") " pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5"
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.194136 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-ts2h8"]
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.200738 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4759l\" (UniqueName: \"kubernetes.io/projected/e8114fb6-6378-4fca-969d-c611a6c5f330-kube-api-access-4759l\") pod \"dnsmasq-dns-86db49b7ff-wkgb5\" (UID: \"e8114fb6-6378-4fca-969d-c611a6c5f330\") " pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5"
Dec 05 01:30:32 crc kubenswrapper[4665]: W1205 01:30:32.205763 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode65a93ed_16e2_4cf7_a295_a3517e553335.slice/crio-09e120fd6a27380407b6bdcacf7b8d16c81f4a90518116ddb09fe6067095fab6 WatchSource:0}: Error finding container 09e120fd6a27380407b6bdcacf7b8d16c81f4a90518116ddb09fe6067095fab6: Status 404 returned error can't find the container with id 09e120fd6a27380407b6bdcacf7b8d16c81f4a90518116ddb09fe6067095fab6
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.231423 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-c2b8c"
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.276031 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/23bb6b3a-40c6-448f-81ee-de6be2de6d5f-dns-svc\") pod \"23bb6b3a-40c6-448f-81ee-de6be2de6d5f\" (UID: \"23bb6b3a-40c6-448f-81ee-de6be2de6d5f\") "
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.276197 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23bb6b3a-40c6-448f-81ee-de6be2de6d5f-config\") pod \"23bb6b3a-40c6-448f-81ee-de6be2de6d5f\" (UID: \"23bb6b3a-40c6-448f-81ee-de6be2de6d5f\") "
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.276333 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zh2gj\" (UniqueName: \"kubernetes.io/projected/23bb6b3a-40c6-448f-81ee-de6be2de6d5f-kube-api-access-zh2gj\") pod \"23bb6b3a-40c6-448f-81ee-de6be2de6d5f\" (UID: \"23bb6b3a-40c6-448f-81ee-de6be2de6d5f\") "
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.277899 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23bb6b3a-40c6-448f-81ee-de6be2de6d5f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "23bb6b3a-40c6-448f-81ee-de6be2de6d5f" (UID: "23bb6b3a-40c6-448f-81ee-de6be2de6d5f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.279714 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/23bb6b3a-40c6-448f-81ee-de6be2de6d5f-config" (OuterVolumeSpecName: "config") pod "23bb6b3a-40c6-448f-81ee-de6be2de6d5f" (UID: "23bb6b3a-40c6-448f-81ee-de6be2de6d5f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.282794 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/23bb6b3a-40c6-448f-81ee-de6be2de6d5f-kube-api-access-zh2gj" (OuterVolumeSpecName: "kube-api-access-zh2gj") pod "23bb6b3a-40c6-448f-81ee-de6be2de6d5f" (UID: "23bb6b3a-40c6-448f-81ee-de6be2de6d5f"). InnerVolumeSpecName "kube-api-access-zh2gj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.363430 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-5pbm6"
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.377807 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b76b2027-5c2b-478f-9481-fc5c07a7bcf7-config\") pod \"b76b2027-5c2b-478f-9481-fc5c07a7bcf7\" (UID: \"b76b2027-5c2b-478f-9481-fc5c07a7bcf7\") "
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.377891 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b76b2027-5c2b-478f-9481-fc5c07a7bcf7-dns-svc\") pod \"b76b2027-5c2b-478f-9481-fc5c07a7bcf7\" (UID: \"b76b2027-5c2b-478f-9481-fc5c07a7bcf7\") "
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.378010 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-brw2p\" (UniqueName: \"kubernetes.io/projected/b76b2027-5c2b-478f-9481-fc5c07a7bcf7-kube-api-access-brw2p\") pod \"b76b2027-5c2b-478f-9481-fc5c07a7bcf7\" (UID: \"b76b2027-5c2b-478f-9481-fc5c07a7bcf7\") "
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.378448 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b76b2027-5c2b-478f-9481-fc5c07a7bcf7-config" (OuterVolumeSpecName: "config") pod "b76b2027-5c2b-478f-9481-fc5c07a7bcf7" (UID: "b76b2027-5c2b-478f-9481-fc5c07a7bcf7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.378582 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b76b2027-5c2b-478f-9481-fc5c07a7bcf7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b76b2027-5c2b-478f-9481-fc5c07a7bcf7" (UID: "b76b2027-5c2b-478f-9481-fc5c07a7bcf7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.378649 4665 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/23bb6b3a-40c6-448f-81ee-de6be2de6d5f-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.378703 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b76b2027-5c2b-478f-9481-fc5c07a7bcf7-config\") on node \"crc\" DevicePath \"\""
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.378730 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/23bb6b3a-40c6-448f-81ee-de6be2de6d5f-config\") on node \"crc\" DevicePath \"\""
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.378741 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zh2gj\" (UniqueName: \"kubernetes.io/projected/23bb6b3a-40c6-448f-81ee-de6be2de6d5f-kube-api-access-zh2gj\") on node \"crc\" DevicePath \"\""
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.381070 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b76b2027-5c2b-478f-9481-fc5c07a7bcf7-kube-api-access-brw2p" (OuterVolumeSpecName: "kube-api-access-brw2p") pod "b76b2027-5c2b-478f-9481-fc5c07a7bcf7" (UID: "b76b2027-5c2b-478f-9481-fc5c07a7bcf7"). InnerVolumeSpecName "kube-api-access-brw2p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.391407 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5"
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.454019 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-c2b8c" event={"ID":"23bb6b3a-40c6-448f-81ee-de6be2de6d5f","Type":"ContainerDied","Data":"6b4ed48fc8fffe5d6939564ed4e3d978e45c1710f2f27bafb38814c44a751f55"}
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.454182 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-c2b8c"
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.467315 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-ts2h8" event={"ID":"e65a93ed-16e2-4cf7-a295-a3517e553335","Type":"ContainerStarted","Data":"09e120fd6a27380407b6bdcacf7b8d16c81f4a90518116ddb09fe6067095fab6"}
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.468748 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-5pbm6" event={"ID":"b76b2027-5c2b-478f-9481-fc5c07a7bcf7","Type":"ContainerDied","Data":"8b4e64909b28b0e579c21c0d2ee8b6347bc0bdd332251e4fa4c0441ec90393d6"}
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.468868 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-5pbm6"
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.481071 4665 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b76b2027-5c2b-478f-9481-fc5c07a7bcf7-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.481103 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-brw2p\" (UniqueName: \"kubernetes.io/projected/b76b2027-5c2b-478f-9481-fc5c07a7bcf7-kube-api-access-brw2p\") on node \"crc\" DevicePath \"\""
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.557178 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-c2b8c"]
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.565470 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-c2b8c"]
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.599876 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-5pbm6"]
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.607694 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-5pbm6"]
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.662621 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-8frtw"]
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.916811 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="23bb6b3a-40c6-448f-81ee-de6be2de6d5f" path="/var/lib/kubelet/pods/23bb6b3a-40c6-448f-81ee-de6be2de6d5f/volumes"
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.917470 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b76b2027-5c2b-478f-9481-fc5c07a7bcf7" path="/var/lib/kubelet/pods/b76b2027-5c2b-478f-9481-fc5c07a7bcf7/volumes"
Dec 05 01:30:32 crc kubenswrapper[4665]: I1205 01:30:32.995907 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-wkgb5"]
Dec 05 01:30:33 crc kubenswrapper[4665]: I1205 01:30:33.477990 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5" event={"ID":"e8114fb6-6378-4fca-969d-c611a6c5f330","Type":"ContainerStarted","Data":"90ce53a3d244c810b4808524a42c15bbd68c80c3644a0b46e2fc0e26101ea5bc"}
Dec 05 01:30:33 crc kubenswrapper[4665]: I1205 01:30:33.480210 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-8frtw" event={"ID":"09e5d8e3-0bdb-41d1-8696-a855cc5b9af5","Type":"ContainerStarted","Data":"22df2ee3bdeecb5b5854622b6b0534b3bba2eda38d0ca02d5053d64da72912ae"}
Dec 05 01:30:34 crc kubenswrapper[4665]: I1205 01:30:34.491693 4665 generic.go:334] "Generic (PLEG): container finished" podID="09e5d8e3-0bdb-41d1-8696-a855cc5b9af5" containerID="fb2f99de362d435967aa88dbad7bc02f8ebb6d117c6c2a5e2a1d270fd62e0d06" exitCode=0
Dec 05 01:30:34 crc kubenswrapper[4665]: I1205 01:30:34.491740 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-8frtw" event={"ID":"09e5d8e3-0bdb-41d1-8696-a855cc5b9af5","Type":"ContainerDied","Data":"fb2f99de362d435967aa88dbad7bc02f8ebb6d117c6c2a5e2a1d270fd62e0d06"}
Dec 05 01:30:40 crc kubenswrapper[4665]: I1205 01:30:40.533810 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-8frtw" event={"ID":"09e5d8e3-0bdb-41d1-8696-a855cc5b9af5","Type":"ContainerStarted","Data":"824411e3c9ebd9d81b7cc5ddc3924af44936925eef3a587c6a1158ea412cbfef"}
Dec 05 01:30:40 crc kubenswrapper[4665]: I1205 01:30:40.534366 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-7fd796d7df-8frtw"
Dec 05 01:30:40 crc kubenswrapper[4665]: I1205 01:30:40.536155 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"82ad13d8-7710-4135-9822-a96d62650e6d","Type":"ContainerStarted","Data":"21680eb362cd8e9adf0a46190a2a0086a2499a8f49cd82521b6d1f78e0cba993"}
Dec 05 01:30:40 crc kubenswrapper[4665]: I1205 01:30:40.558328 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-7fd796d7df-8frtw" podStartSLOduration=9.054732672 podStartE2EDuration="9.55831078s" podCreationTimestamp="2025-12-05 01:30:31 +0000 UTC" firstStartedPulling="2025-12-05 01:30:32.67320758 +0000 UTC m=+1208.012599879" lastFinishedPulling="2025-12-05 01:30:33.176785688 +0000 UTC m=+1208.516177987" observedRunningTime="2025-12-05 01:30:40.550963005 +0000 UTC m=+1215.890355304" watchObservedRunningTime="2025-12-05 01:30:40.55831078 +0000 UTC m=+1215.897703079"
Dec 05 01:30:42 crc kubenswrapper[4665]: I1205 01:30:42.555445 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"02847117-544d-400b-b9a0-4d10200e0f0d","Type":"ContainerStarted","Data":"1d5934ae89efd8437d16b9c9ddc7fde87fabb1eb3df59e4fa24ee0adfdefba07"}
Dec 05 01:30:42 crc kubenswrapper[4665]: I1205 01:30:42.556191 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0"
Dec 05 01:30:42 crc kubenswrapper[4665]: I1205 01:30:42.558579 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6","Type":"ContainerStarted","Data":"4090fde2560b094776f59b06274503ffdae93b3db71ae49b85402cf03e5bdc3b"}
Dec 05 01:30:42 crc kubenswrapper[4665]: I1205 01:30:42.563670 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"423b314f-ea29-4705-8447-3a316edd8c6b","Type":"ContainerStarted","Data":"83d78038b95208d8e86e784947c664b3a7d04147f93ca4965be21be7eaf7a468"}
Dec 05 01:30:42 crc kubenswrapper[4665]: I1205 01:30:42.567872 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-ts2h8" event={"ID":"e65a93ed-16e2-4cf7-a295-a3517e553335","Type":"ContainerStarted","Data":"278d87ac712efdf1f0bd0107d5042ffc0ad542d9e62a11e390f4926a00ff98ca"}
Dec 05 01:30:42 crc kubenswrapper[4665]: I1205 01:30:42.576540 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"68f1be10-e014-492c-9fb2-f6131ee209d4","Type":"ContainerStarted","Data":"02e4cdbdac1aeef2c7bb599ff4b4dafd1810265114c47296a7e5985522e44500"}
Dec 05 01:30:42 crc kubenswrapper[4665]: I1205 01:30:42.581776 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.8737739060000003 podStartE2EDuration="51.581754379s" podCreationTimestamp="2025-12-05 01:29:51 +0000 UTC" firstStartedPulling="2025-12-05 01:29:52.85658093 +0000 UTC m=+1168.195973229" lastFinishedPulling="2025-12-05 01:30:41.564561403 +0000 UTC m=+1216.903953702" observedRunningTime="2025-12-05 01:30:42.574942996 +0000 UTC m=+1217.914335315" watchObservedRunningTime="2025-12-05 01:30:42.581754379 +0000 UTC m=+1217.921146678"
Dec 05 01:30:42 crc kubenswrapper[4665]: I1205 01:30:42.581955 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"df5300cc-4ce3-4574-a775-595607aeddb6","Type":"ContainerStarted","Data":"7ae33077643b28e5b58b4c1b6f1f5fff658677974871eb4bdb0d332f51368b8e"}
Dec 05 01:30:42 crc kubenswrapper[4665]: I1205 01:30:42.584918 4665 generic.go:334] "Generic (PLEG): container finished" podID="e8114fb6-6378-4fca-969d-c611a6c5f330" containerID="a3a97311cfddd4e5c9d954502f159b1d28c3dd01c0f51d640ae0f244f3a9ad5b" exitCode=0
Dec 05 01:30:42 crc kubenswrapper[4665]: I1205 01:30:42.584965 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5" event={"ID":"e8114fb6-6378-4fca-969d-c611a6c5f330","Type":"ContainerDied","Data":"a3a97311cfddd4e5c9d954502f159b1d28c3dd01c0f51d640ae0f244f3a9ad5b"}
Dec 05 01:30:42 crc kubenswrapper[4665]: I1205 01:30:42.649636 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-ts2h8" podStartSLOduration=3.250904462 podStartE2EDuration="12.649617584s" podCreationTimestamp="2025-12-05 01:30:30 +0000 UTC" firstStartedPulling="2025-12-05 01:30:32.208678671 +0000 UTC m=+1207.548070970" lastFinishedPulling="2025-12-05 01:30:41.607391803 +0000 UTC m=+1216.946784092" observedRunningTime="2025-12-05 01:30:42.647110285 +0000 UTC m=+1217.986502584" watchObservedRunningTime="2025-12-05 01:30:42.649617584 +0000 UTC m=+1217.989009893"
Dec 05 01:30:42 crc kubenswrapper[4665]: I1205 01:30:42.746027 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=27.148710711 podStartE2EDuration="43.746011659s" podCreationTimestamp="2025-12-05 01:29:59 +0000 UTC" firstStartedPulling="2025-12-05 01:30:25.04304932 +0000 UTC m=+1200.382441619" lastFinishedPulling="2025-12-05 01:30:41.640350268 +0000 UTC m=+1216.979742567" observedRunningTime="2025-12-05 01:30:42.704217134 +0000 UTC m=+1218.043609433" watchObservedRunningTime="2025-12-05 01:30:42.746011659 +0000 UTC m=+1218.085403958"
Dec 05 01:30:42 crc kubenswrapper[4665]: I1205 01:30:42.751053 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=29.314324587 podStartE2EDuration="46.751019818s" podCreationTimestamp="2025-12-05 01:29:56 +0000 UTC" firstStartedPulling="2025-12-05 01:30:24.1407621 +0000 UTC m=+1199.480154399" lastFinishedPulling="2025-12-05 01:30:41.577457331 +0000 UTC m=+1216.916849630" observedRunningTime="2025-12-05 01:30:42.743915049 +0000 UTC m=+1218.083307348" watchObservedRunningTime="2025-12-05 01:30:42.751019818 +0000 UTC m=+1218.090412117"
Dec 05 01:30:42 crc kubenswrapper[4665]: I1205 01:30:42.972052 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0"
Dec 05 01:30:42 crc kubenswrapper[4665]: I1205 01:30:42.972088 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-nb-0"
Dec 05 01:30:43 crc kubenswrapper[4665]: I1205 01:30:43.131269 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0"
Dec 05 01:30:43 crc kubenswrapper[4665]: I1205 01:30:43.227033 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0"
Dec 05 01:30:43 crc kubenswrapper[4665]: I1205 01:30:43.260709 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0"
Dec 05 01:30:43 crc kubenswrapper[4665]: I1205 01:30:43.593151 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d40b331f-9046-43a5-92e9-89ac2b567043","Type":"ContainerStarted","Data":"a9ff8bdcd3b3ecf7b32a9a7e0ec7c0f8f50930d24164db0958d82ef6e7f35912"}
Dec 05 01:30:43 crc kubenswrapper[4665]: I1205 01:30:43.593672 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0"
Dec 05 01:30:43 crc kubenswrapper[4665]: I1205 01:30:43.595597 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5" event={"ID":"e8114fb6-6378-4fca-969d-c611a6c5f330","Type":"ContainerStarted","Data":"0476a292ff7105b4c65527a995b5ca1c9d969134367890a7de319069f8242c60"}
Dec 05 01:30:43 crc kubenswrapper[4665]: I1205 01:30:43.596055 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0"
Dec 05 01:30:43 crc kubenswrapper[4665]: I1205 01:30:43.618468 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=3.030459414 podStartE2EDuration="50.618443907s" podCreationTimestamp="2025-12-05 01:29:53 +0000 UTC" firstStartedPulling="2025-12-05 01:29:55.198438788 +0000 UTC m=+1170.537831087" lastFinishedPulling="2025-12-05 01:30:42.786423281 +0000 UTC m=+1218.125815580" observedRunningTime="2025-12-05 01:30:43.615096338 +0000 UTC m=+1218.954488657" watchObservedRunningTime="2025-12-05 01:30:43.618443907 +0000 UTC m=+1218.957836226"
Dec 05 01:30:43 crc kubenswrapper[4665]: I1205 01:30:43.642596 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5" podStartSLOduration=4.13774122 podStartE2EDuration="12.642575803s" podCreationTimestamp="2025-12-05 01:30:31 +0000 UTC" firstStartedPulling="2025-12-05 01:30:33.027169526 +0000 UTC m=+1208.366561825" lastFinishedPulling="2025-12-05 01:30:41.532004109 +0000 UTC m=+1216.871396408" observedRunningTime="2025-12-05 01:30:43.637075272 +0000 UTC m=+1218.976467581" watchObservedRunningTime="2025-12-05 01:30:43.642575803 +0000 UTC m=+1218.981968102"
Dec 05 01:30:43 crc kubenswrapper[4665]: I1205 01:30:43.655014 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0"
Dec 05 01:30:43 crc kubenswrapper[4665]: I1205 01:30:43.656587 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.047229 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.048780 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.050657 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.050796 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.051112 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-vdclz"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.051116 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.072378 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.134627 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/acb7355b-b799-4b12-a8f1-75dd2439696f-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"acb7355b-b799-4b12-a8f1-75dd2439696f\") " pod="openstack/ovn-northd-0"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.134715 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/acb7355b-b799-4b12-a8f1-75dd2439696f-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"acb7355b-b799-4b12-a8f1-75dd2439696f\") " pod="openstack/ovn-northd-0"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.134740 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb7355b-b799-4b12-a8f1-75dd2439696f-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"acb7355b-b799-4b12-a8f1-75dd2439696f\") " pod="openstack/ovn-northd-0"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.134761 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/acb7355b-b799-4b12-a8f1-75dd2439696f-config\") pod \"ovn-northd-0\" (UID: \"acb7355b-b799-4b12-a8f1-75dd2439696f\") " pod="openstack/ovn-northd-0"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.134812 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/acb7355b-b799-4b12-a8f1-75dd2439696f-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"acb7355b-b799-4b12-a8f1-75dd2439696f\") " pod="openstack/ovn-northd-0"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.135025 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/acb7355b-b799-4b12-a8f1-75dd2439696f-scripts\") pod \"ovn-northd-0\" (UID: \"acb7355b-b799-4b12-a8f1-75dd2439696f\") " pod="openstack/ovn-northd-0"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.135105 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h5zth\" (UniqueName: \"kubernetes.io/projected/acb7355b-b799-4b12-a8f1-75dd2439696f-kube-api-access-h5zth\") pod \"ovn-northd-0\" (UID: \"acb7355b-b799-4b12-a8f1-75dd2439696f\") " pod="openstack/ovn-northd-0"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.236824 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/acb7355b-b799-4b12-a8f1-75dd2439696f-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"acb7355b-b799-4b12-a8f1-75dd2439696f\") " pod="openstack/ovn-northd-0"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.236883 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb7355b-b799-4b12-a8f1-75dd2439696f-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"acb7355b-b799-4b12-a8f1-75dd2439696f\") " pod="openstack/ovn-northd-0"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.236919 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/acb7355b-b799-4b12-a8f1-75dd2439696f-config\") pod \"ovn-northd-0\" (UID: \"acb7355b-b799-4b12-a8f1-75dd2439696f\") " pod="openstack/ovn-northd-0"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.236973 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/acb7355b-b799-4b12-a8f1-75dd2439696f-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"acb7355b-b799-4b12-a8f1-75dd2439696f\") " pod="openstack/ovn-northd-0"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.237021 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/acb7355b-b799-4b12-a8f1-75dd2439696f-scripts\") pod \"ovn-northd-0\" (UID: \"acb7355b-b799-4b12-a8f1-75dd2439696f\") " pod="openstack/ovn-northd-0"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.237047 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h5zth\" (UniqueName: \"kubernetes.io/projected/acb7355b-b799-4b12-a8f1-75dd2439696f-kube-api-access-h5zth\") pod \"ovn-northd-0\" (UID: \"acb7355b-b799-4b12-a8f1-75dd2439696f\") " pod="openstack/ovn-northd-0"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.237114 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/acb7355b-b799-4b12-a8f1-75dd2439696f-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"acb7355b-b799-4b12-a8f1-75dd2439696f\") " pod="openstack/ovn-northd-0"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.237545 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/acb7355b-b799-4b12-a8f1-75dd2439696f-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"acb7355b-b799-4b12-a8f1-75dd2439696f\") " pod="openstack/ovn-northd-0"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.238030 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/acb7355b-b799-4b12-a8f1-75dd2439696f-config\") pod \"ovn-northd-0\" (UID: \"acb7355b-b799-4b12-a8f1-75dd2439696f\") " pod="openstack/ovn-northd-0"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.238131 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/acb7355b-b799-4b12-a8f1-75dd2439696f-scripts\") pod \"ovn-northd-0\" (UID: \"acb7355b-b799-4b12-a8f1-75dd2439696f\") " pod="openstack/ovn-northd-0"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.243216 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/acb7355b-b799-4b12-a8f1-75dd2439696f-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"acb7355b-b799-4b12-a8f1-75dd2439696f\") " pod="openstack/ovn-northd-0"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.243684 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/acb7355b-b799-4b12-a8f1-75dd2439696f-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"acb7355b-b799-4b12-a8f1-75dd2439696f\") " pod="openstack/ovn-northd-0"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.260013 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h5zth\" (UniqueName: \"kubernetes.io/projected/acb7355b-b799-4b12-a8f1-75dd2439696f-kube-api-access-h5zth\") pod \"ovn-northd-0\" (UID: \"acb7355b-b799-4b12-a8f1-75dd2439696f\") " pod="openstack/ovn-northd-0"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.261528 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/acb7355b-b799-4b12-a8f1-75dd2439696f-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"acb7355b-b799-4b12-a8f1-75dd2439696f\") " pod="openstack/ovn-northd-0"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.368868 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.603585 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5"
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.806965 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Dec 05 01:30:44 crc kubenswrapper[4665]: W1205 01:30:44.815714 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podacb7355b_b799_4b12_a8f1_75dd2439696f.slice/crio-2bf338485670a9a76fb9e9c9cddae7c74afb33551e07824bbcc463dbb36858e0 WatchSource:0}: Error finding container 2bf338485670a9a76fb9e9c9cddae7c74afb33551e07824bbcc463dbb36858e0: Status 404 returned error can't find the container with id 2bf338485670a9a76fb9e9c9cddae7c74afb33551e07824bbcc463dbb36858e0
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.922764 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 01:30:44 crc kubenswrapper[4665]: I1205 01:30:44.922825 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 01:30:45 crc kubenswrapper[4665]: I1205 01:30:45.622544 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"acb7355b-b799-4b12-a8f1-75dd2439696f","Type":"ContainerStarted","Data":"2bf338485670a9a76fb9e9c9cddae7c74afb33551e07824bbcc463dbb36858e0"}
Dec 05 01:30:46 crc kubenswrapper[4665]: I1205 01:30:46.645467 4665 generic.go:334] "Generic (PLEG): container finished" podID="423b314f-ea29-4705-8447-3a316edd8c6b" containerID="83d78038b95208d8e86e784947c664b3a7d04147f93ca4965be21be7eaf7a468" exitCode=0
Dec 05 01:30:46 crc kubenswrapper[4665]: I1205 01:30:46.646664 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"423b314f-ea29-4705-8447-3a316edd8c6b","Type":"ContainerDied","Data":"83d78038b95208d8e86e784947c664b3a7d04147f93ca4965be21be7eaf7a468"}
Dec 05 01:30:46 crc kubenswrapper[4665]: I1205 01:30:46.649388 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6","Type":"ContainerDied","Data":"4090fde2560b094776f59b06274503ffdae93b3db71ae49b85402cf03e5bdc3b"}
Dec 05 01:30:46 crc kubenswrapper[4665]: I1205 01:30:46.649509 4665 generic.go:334] "Generic (PLEG): container finished" podID="1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6" containerID="4090fde2560b094776f59b06274503ffdae93b3db71ae49b85402cf03e5bdc3b" exitCode=0
Dec 05 01:30:46 crc kubenswrapper[4665]: I1205 01:30:46.654960 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"acb7355b-b799-4b12-a8f1-75dd2439696f","Type":"ContainerStarted","Data":"f8d7b25e7435a9d46b11882a7877006d6180ad0042a6d633474ad092fdb90048"}
Dec 05 01:30:46 crc kubenswrapper[4665]: I1205 01:30:46.655014 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"acb7355b-b799-4b12-a8f1-75dd2439696f","Type":"ContainerStarted","Data":"6e02c1040f88a518c9065eaf72750ce569de7974552536c2d3630168114ee5b5"}
Dec 05 01:30:46 crc kubenswrapper[4665]: I1205 01:30:46.655210 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0"
Dec 05 01:30:46 crc kubenswrapper[4665]: I1205 01:30:46.735400 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=1.635012454 podStartE2EDuration="2.735382709s" podCreationTimestamp="2025-12-05 01:30:44 +0000 UTC" firstStartedPulling="2025-12-05 01:30:44.818121107 +0000 UTC m=+1220.157513406" lastFinishedPulling="2025-12-05 01:30:45.918491362 +0000 UTC m=+1221.257883661" observedRunningTime="2025-12-05 01:30:46.695711744 +0000 UTC m=+1222.035104053" watchObservedRunningTime="2025-12-05 01:30:46.735382709 +0000 UTC m=+1222.074774998"
Dec 05 01:30:46 crc kubenswrapper[4665]: I1205 01:30:46.884453 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-7fd796d7df-8frtw"
Dec 05 01:30:47 crc kubenswrapper[4665]: I1205 01:30:47.082462 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0"
Dec 05 01:30:47 crc kubenswrapper[4665]: I1205 01:30:47.664259 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6","Type":"ContainerStarted","Data":"359c9760d8752c26469fc1b1333b7fe4f21b8ee02e85d2cfb5cd314ae72288b5"}
Dec 05 01:30:47 crc kubenswrapper[4665]: I1205 01:30:47.667158 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"423b314f-ea29-4705-8447-3a316edd8c6b","Type":"ContainerStarted","Data":"150ed1509fcfd30a4e8ebf48202e44facba1dde11fc79e04ab8cdc730b153f0d"}
Dec 05 01:30:47 crc kubenswrapper[4665]: I1205 01:30:47.702885 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=8.817910155 podStartE2EDuration="57.702867141s" podCreationTimestamp="2025-12-05 01:29:50 +0000 UTC" firstStartedPulling="2025-12-05 01:29:52.722424837 +0000 UTC m=+1168.061817136" lastFinishedPulling="2025-12-05 01:30:41.607381813 +0000 UTC m=+1216.946774122" observedRunningTime="2025-12-05 01:30:47.693591659 +0000 UTC m=+1223.032983978" watchObservedRunningTime="2025-12-05 01:30:47.702867141 +0000 UTC m=+1223.042259450"
Dec 05 01:30:47 crc kubenswrapper[4665]: I1205 01:30:47.724526 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=9.293041093 podStartE2EDuration="59.724508325s" podCreationTimestamp="2025-12-05 01:29:48 +0000 UTC" firstStartedPulling="2025-12-05 01:29:51.16577799 +0000 UTC m=+1166.505170289" lastFinishedPulling="2025-12-05 01:30:41.597245222 +0000 UTC m=+1216.936637521" observedRunningTime="2025-12-05 01:30:47.717045328 +0000 UTC m=+1223.056437637" watchObservedRunningTime="2025-12-05 01:30:47.724508325 +0000 UTC m=+1223.063900624"
Dec 05 01:30:50 crc kubenswrapper[4665]: I1205 01:30:50.289786 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0"
Dec 05 01:30:50 crc kubenswrapper[4665]: I1205 01:30:50.290124 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0"
Dec 05 01:30:51 crc kubenswrapper[4665]: I1205 01:30:51.683702 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0"
Dec 05 01:30:51 crc kubenswrapper[4665]: I1205 01:30:51.683741 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0"
Dec 05 01:30:51 crc kubenswrapper[4665]: I1205 01:30:51.744939 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0"
Dec 05 01:30:51 crc kubenswrapper[4665]: I1205 01:30:51.832559 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0"
Dec 05 01:30:52 crc kubenswrapper[4665]: I1205 01:30:52.351133 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0"
Dec 05 01:30:52 crc kubenswrapper[4665]: I1205 01:30:52.392494 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5"
Dec 05 01:30:52 crc kubenswrapper[4665]: I1205 01:30:52.470227 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-8frtw"]
Dec 05 01:30:52 crc kubenswrapper[4665]: I1205 01:30:52.470528 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-7fd796d7df-8frtw" podUID="09e5d8e3-0bdb-41d1-8696-a855cc5b9af5" containerName="dnsmasq-dns" containerID="cri-o://824411e3c9ebd9d81b7cc5ddc3924af44936925eef3a587c6a1158ea412cbfef" gracePeriod=10
Dec 05 01:30:52 crc kubenswrapper[4665]: I1205 01:30:52.481614 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0"
Dec 05 01:30:52 crc kubenswrapper[4665]: I1205 01:30:52.703222 4665 generic.go:334] "Generic (PLEG): container finished" podID="09e5d8e3-0bdb-41d1-8696-a855cc5b9af5" containerID="824411e3c9ebd9d81b7cc5ddc3924af44936925eef3a587c6a1158ea412cbfef" exitCode=0
Dec 05 01:30:52 crc kubenswrapper[4665]: I1205 01:30:52.704426 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-8frtw" event={"ID":"09e5d8e3-0bdb-41d1-8696-a855cc5b9af5","Type":"ContainerDied","Data":"824411e3c9ebd9d81b7cc5ddc3924af44936925eef3a587c6a1158ea412cbfef"}
Dec 05 01:30:52 crc kubenswrapper[4665]: I1205 01:30:52.961523 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-8frtw"
Dec 05 01:30:53 crc kubenswrapper[4665]: I1205 01:30:53.109213 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/09e5d8e3-0bdb-41d1-8696-a855cc5b9af5-ovsdbserver-nb\") pod \"09e5d8e3-0bdb-41d1-8696-a855cc5b9af5\" (UID: \"09e5d8e3-0bdb-41d1-8696-a855cc5b9af5\") "
Dec 05 01:30:53 crc kubenswrapper[4665]: I1205 01:30:53.109281 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dq9pz\" (UniqueName: \"kubernetes.io/projected/09e5d8e3-0bdb-41d1-8696-a855cc5b9af5-kube-api-access-dq9pz\") pod \"09e5d8e3-0bdb-41d1-8696-a855cc5b9af5\" (UID: \"09e5d8e3-0bdb-41d1-8696-a855cc5b9af5\") "
Dec 05 01:30:53 crc kubenswrapper[4665]: I1205 01:30:53.109495 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09e5d8e3-0bdb-41d1-8696-a855cc5b9af5-config\") pod \"09e5d8e3-0bdb-41d1-8696-a855cc5b9af5\" (UID: \"09e5d8e3-0bdb-41d1-8696-a855cc5b9af5\") "
Dec 05 01:30:53 crc kubenswrapper[4665]: I1205 01:30:53.109538 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/09e5d8e3-0bdb-41d1-8696-a855cc5b9af5-dns-svc\") pod \"09e5d8e3-0bdb-41d1-8696-a855cc5b9af5\" (UID: \"09e5d8e3-0bdb-41d1-8696-a855cc5b9af5\") "
Dec 05 01:30:53 crc kubenswrapper[4665]: I1205 01:30:53.124308 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09e5d8e3-0bdb-41d1-8696-a855cc5b9af5-kube-api-access-dq9pz" (OuterVolumeSpecName: "kube-api-access-dq9pz") pod "09e5d8e3-0bdb-41d1-8696-a855cc5b9af5" (UID: "09e5d8e3-0bdb-41d1-8696-a855cc5b9af5"). InnerVolumeSpecName "kube-api-access-dq9pz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:30:53 crc kubenswrapper[4665]: I1205 01:30:53.155308 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09e5d8e3-0bdb-41d1-8696-a855cc5b9af5-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "09e5d8e3-0bdb-41d1-8696-a855cc5b9af5" (UID: "09e5d8e3-0bdb-41d1-8696-a855cc5b9af5"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:30:53 crc kubenswrapper[4665]: I1205 01:30:53.155772 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09e5d8e3-0bdb-41d1-8696-a855cc5b9af5-config" (OuterVolumeSpecName: "config") pod "09e5d8e3-0bdb-41d1-8696-a855cc5b9af5" (UID: "09e5d8e3-0bdb-41d1-8696-a855cc5b9af5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:30:53 crc kubenswrapper[4665]: I1205 01:30:53.156830 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09e5d8e3-0bdb-41d1-8696-a855cc5b9af5-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "09e5d8e3-0bdb-41d1-8696-a855cc5b9af5" (UID: "09e5d8e3-0bdb-41d1-8696-a855cc5b9af5"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:30:53 crc kubenswrapper[4665]: I1205 01:30:53.213305 4665 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/09e5d8e3-0bdb-41d1-8696-a855cc5b9af5-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 05 01:30:53 crc kubenswrapper[4665]: I1205 01:30:53.213341 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dq9pz\" (UniqueName: \"kubernetes.io/projected/09e5d8e3-0bdb-41d1-8696-a855cc5b9af5-kube-api-access-dq9pz\") on node \"crc\" DevicePath \"\""
Dec 05 01:30:53 crc kubenswrapper[4665]: I1205 01:30:53.213353 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09e5d8e3-0bdb-41d1-8696-a855cc5b9af5-config\") on node \"crc\" DevicePath \"\""
Dec 05 01:30:53 crc kubenswrapper[4665]: I1205 01:30:53.213364 4665 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/09e5d8e3-0bdb-41d1-8696-a855cc5b9af5-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 01:30:53 crc kubenswrapper[4665]: I1205 01:30:53.712369 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7fd796d7df-8frtw" event={"ID":"09e5d8e3-0bdb-41d1-8696-a855cc5b9af5","Type":"ContainerDied","Data":"22df2ee3bdeecb5b5854622b6b0534b3bba2eda38d0ca02d5053d64da72912ae"}
Dec 05 01:30:53 crc kubenswrapper[4665]: I1205 01:30:53.712420 4665 scope.go:117] "RemoveContainer" containerID="824411e3c9ebd9d81b7cc5ddc3924af44936925eef3a587c6a1158ea412cbfef"
Dec 05 01:30:53 crc kubenswrapper[4665]: I1205 01:30:53.712440 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7fd796d7df-8frtw"
Dec 05 01:30:53 crc kubenswrapper[4665]: I1205 01:30:53.733638 4665 scope.go:117] "RemoveContainer" containerID="fb2f99de362d435967aa88dbad7bc02f8ebb6d117c6c2a5e2a1d270fd62e0d06"
Dec 05 01:30:53 crc kubenswrapper[4665]: I1205 01:30:53.745287 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-8frtw"]
Dec 05 01:30:53 crc kubenswrapper[4665]: I1205 01:30:53.754551 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7fd796d7df-8frtw"]
Dec 05 01:30:54 crc kubenswrapper[4665]: I1205 01:30:54.227119 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0"
Dec 05 01:30:54 crc kubenswrapper[4665]: I1205 01:30:54.344309 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-698758b865-zpx6p"]
Dec 05 01:30:54 crc kubenswrapper[4665]: E1205 01:30:54.350344 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09e5d8e3-0bdb-41d1-8696-a855cc5b9af5" containerName="dnsmasq-dns"
Dec 05 01:30:54 crc kubenswrapper[4665]: I1205 01:30:54.350386 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="09e5d8e3-0bdb-41d1-8696-a855cc5b9af5" containerName="dnsmasq-dns"
Dec 05 01:30:54 crc kubenswrapper[4665]: E1205 01:30:54.350416 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="09e5d8e3-0bdb-41d1-8696-a855cc5b9af5" containerName="init"
Dec 05 01:30:54 crc kubenswrapper[4665]: I1205 01:30:54.350425 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="09e5d8e3-0bdb-41d1-8696-a855cc5b9af5" containerName="init"
Dec 05 01:30:54 crc kubenswrapper[4665]: I1205 01:30:54.350802 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="09e5d8e3-0bdb-41d1-8696-a855cc5b9af5" containerName="dnsmasq-dns"
Dec 05 01:30:54 crc kubenswrapper[4665]: I1205 01:30:54.351918 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-zpx6p"
Dec 05 01:30:54 crc kubenswrapper[4665]: I1205 01:30:54.430531 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k7mw9\" (UniqueName: \"kubernetes.io/projected/b63eb623-410f-4130-bf07-845b294c89f1-kube-api-access-k7mw9\") pod \"dnsmasq-dns-698758b865-zpx6p\" (UID: \"b63eb623-410f-4130-bf07-845b294c89f1\") " pod="openstack/dnsmasq-dns-698758b865-zpx6p"
Dec 05 01:30:54 crc kubenswrapper[4665]: I1205 01:30:54.430648 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b63eb623-410f-4130-bf07-845b294c89f1-config\") pod \"dnsmasq-dns-698758b865-zpx6p\" (UID: \"b63eb623-410f-4130-bf07-845b294c89f1\") " pod="openstack/dnsmasq-dns-698758b865-zpx6p"
Dec 05 01:30:54 crc kubenswrapper[4665]: I1205 01:30:54.430711 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b63eb623-410f-4130-bf07-845b294c89f1-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-zpx6p\" (UID: \"b63eb623-410f-4130-bf07-845b294c89f1\") " pod="openstack/dnsmasq-dns-698758b865-zpx6p"
Dec 05 01:30:54 crc kubenswrapper[4665]: I1205 01:30:54.430739 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b63eb623-410f-4130-bf07-845b294c89f1-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-zpx6p\" (UID: \"b63eb623-410f-4130-bf07-845b294c89f1\") " pod="openstack/dnsmasq-dns-698758b865-zpx6p"
Dec 05 01:30:54 crc kubenswrapper[4665]: I1205 01:30:54.430773 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b63eb623-410f-4130-bf07-845b294c89f1-dns-svc\") pod \"dnsmasq-dns-698758b865-zpx6p\" (UID: \"b63eb623-410f-4130-bf07-845b294c89f1\") " pod="openstack/dnsmasq-dns-698758b865-zpx6p"
Dec 05 01:30:54 crc kubenswrapper[4665]: I1205 01:30:54.431529 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-zpx6p"]
Dec 05 01:30:54 crc kubenswrapper[4665]: I1205 01:30:54.532192 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b63eb623-410f-4130-bf07-845b294c89f1-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-zpx6p\" (UID: \"b63eb623-410f-4130-bf07-845b294c89f1\") " pod="openstack/dnsmasq-dns-698758b865-zpx6p"
Dec 05 01:30:54 crc kubenswrapper[4665]: I1205 01:30:54.532251 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b63eb623-410f-4130-bf07-845b294c89f1-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-zpx6p\" (UID: \"b63eb623-410f-4130-bf07-845b294c89f1\") " pod="openstack/dnsmasq-dns-698758b865-zpx6p"
Dec 05 01:30:54 crc kubenswrapper[4665]: I1205 01:30:54.532311 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b63eb623-410f-4130-bf07-845b294c89f1-dns-svc\") pod \"dnsmasq-dns-698758b865-zpx6p\" (UID: \"b63eb623-410f-4130-bf07-845b294c89f1\") "
pod="openstack/dnsmasq-dns-698758b865-zpx6p" Dec 05 01:30:54 crc kubenswrapper[4665]: I1205 01:30:54.532350 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k7mw9\" (UniqueName: \"kubernetes.io/projected/b63eb623-410f-4130-bf07-845b294c89f1-kube-api-access-k7mw9\") pod \"dnsmasq-dns-698758b865-zpx6p\" (UID: \"b63eb623-410f-4130-bf07-845b294c89f1\") " pod="openstack/dnsmasq-dns-698758b865-zpx6p" Dec 05 01:30:54 crc kubenswrapper[4665]: I1205 01:30:54.532434 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b63eb623-410f-4130-bf07-845b294c89f1-config\") pod \"dnsmasq-dns-698758b865-zpx6p\" (UID: \"b63eb623-410f-4130-bf07-845b294c89f1\") " pod="openstack/dnsmasq-dns-698758b865-zpx6p" Dec 05 01:30:54 crc kubenswrapper[4665]: I1205 01:30:54.533231 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b63eb623-410f-4130-bf07-845b294c89f1-ovsdbserver-nb\") pod \"dnsmasq-dns-698758b865-zpx6p\" (UID: \"b63eb623-410f-4130-bf07-845b294c89f1\") " pod="openstack/dnsmasq-dns-698758b865-zpx6p" Dec 05 01:30:54 crc kubenswrapper[4665]: I1205 01:30:54.533693 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b63eb623-410f-4130-bf07-845b294c89f1-ovsdbserver-sb\") pod \"dnsmasq-dns-698758b865-zpx6p\" (UID: \"b63eb623-410f-4130-bf07-845b294c89f1\") " pod="openstack/dnsmasq-dns-698758b865-zpx6p" Dec 05 01:30:54 crc kubenswrapper[4665]: I1205 01:30:54.533723 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b63eb623-410f-4130-bf07-845b294c89f1-config\") pod \"dnsmasq-dns-698758b865-zpx6p\" (UID: \"b63eb623-410f-4130-bf07-845b294c89f1\") " pod="openstack/dnsmasq-dns-698758b865-zpx6p" Dec 05 01:30:54 crc kubenswrapper[4665]: I1205 01:30:54.533830 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b63eb623-410f-4130-bf07-845b294c89f1-dns-svc\") pod \"dnsmasq-dns-698758b865-zpx6p\" (UID: \"b63eb623-410f-4130-bf07-845b294c89f1\") " pod="openstack/dnsmasq-dns-698758b865-zpx6p" Dec 05 01:30:54 crc kubenswrapper[4665]: I1205 01:30:54.570115 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k7mw9\" (UniqueName: \"kubernetes.io/projected/b63eb623-410f-4130-bf07-845b294c89f1-kube-api-access-k7mw9\") pod \"dnsmasq-dns-698758b865-zpx6p\" (UID: \"b63eb623-410f-4130-bf07-845b294c89f1\") " pod="openstack/dnsmasq-dns-698758b865-zpx6p" Dec 05 01:30:54 crc kubenswrapper[4665]: I1205 01:30:54.669613 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-zpx6p" Dec 05 01:30:54 crc kubenswrapper[4665]: I1205 01:30:54.908849 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09e5d8e3-0bdb-41d1-8696-a855cc5b9af5" path="/var/lib/kubelet/pods/09e5d8e3-0bdb-41d1-8696-a855cc5b9af5/volumes" Dec 05 01:30:55 crc kubenswrapper[4665]: W1205 01:30:55.222814 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb63eb623_410f_4130_bf07_845b294c89f1.slice/crio-03332b114e0c63e745eff2bb3fa4cb9d0a33a096193e69729a43f0dae8275372 WatchSource:0}: Error finding container 03332b114e0c63e745eff2bb3fa4cb9d0a33a096193e69729a43f0dae8275372: Status 404 returned error can't find the container with id 03332b114e0c63e745eff2bb3fa4cb9d0a33a096193e69729a43f0dae8275372 Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.226758 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-698758b865-zpx6p"] Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.492240 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.501411 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.504282 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.504698 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-tfmdg" Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.505650 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.506551 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.532267 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.652478 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-lock\") pod \"swift-storage-0\" (UID: \"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f\") " pod="openstack/swift-storage-0" Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.652863 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f\") " pod="openstack/swift-storage-0" Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.652897 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-cache\") pod \"swift-storage-0\" (UID: \"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f\") " pod="openstack/swift-storage-0" Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.652946 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-etc-swift\") pod \"swift-storage-0\" (UID: 
\"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f\") " pod="openstack/swift-storage-0" Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.652986 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w25cw\" (UniqueName: \"kubernetes.io/projected/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-kube-api-access-w25cw\") pod \"swift-storage-0\" (UID: \"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f\") " pod="openstack/swift-storage-0" Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.744642 4665 generic.go:334] "Generic (PLEG): container finished" podID="b63eb623-410f-4130-bf07-845b294c89f1" containerID="afbf4edaf46191dc5f10d5be8b1236caf66fc4379cc71d443a41de7f0f6ecdcd" exitCode=0 Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.744691 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-zpx6p" event={"ID":"b63eb623-410f-4130-bf07-845b294c89f1","Type":"ContainerDied","Data":"afbf4edaf46191dc5f10d5be8b1236caf66fc4379cc71d443a41de7f0f6ecdcd"} Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.744721 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-zpx6p" event={"ID":"b63eb623-410f-4130-bf07-845b294c89f1","Type":"ContainerStarted","Data":"03332b114e0c63e745eff2bb3fa4cb9d0a33a096193e69729a43f0dae8275372"} Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.753944 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w25cw\" (UniqueName: \"kubernetes.io/projected/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-kube-api-access-w25cw\") pod \"swift-storage-0\" (UID: \"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f\") " pod="openstack/swift-storage-0" Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.754024 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-lock\") pod \"swift-storage-0\" (UID: \"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f\") " pod="openstack/swift-storage-0" Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.754087 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f\") " pod="openstack/swift-storage-0" Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.754125 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-cache\") pod \"swift-storage-0\" (UID: \"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f\") " pod="openstack/swift-storage-0" Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.754167 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-etc-swift\") pod \"swift-storage-0\" (UID: \"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f\") " pod="openstack/swift-storage-0" Dec 05 01:30:55 crc kubenswrapper[4665]: E1205 01:30:55.754341 4665 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 01:30:55 crc kubenswrapper[4665]: E1205 01:30:55.754358 4665 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 01:30:55 crc kubenswrapper[4665]: E1205 
01:30:55.754405 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-etc-swift podName:d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f nodeName:}" failed. No retries permitted until 2025-12-05 01:30:56.254384771 +0000 UTC m=+1231.593777070 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-etc-swift") pod "swift-storage-0" (UID: "d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f") : configmap "swift-ring-files" not found Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.754891 4665 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/swift-storage-0" Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.755086 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-lock\") pod \"swift-storage-0\" (UID: \"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f\") " pod="openstack/swift-storage-0" Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.755157 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-cache\") pod \"swift-storage-0\" (UID: \"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f\") " pod="openstack/swift-storage-0" Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.793991 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w25cw\" (UniqueName: \"kubernetes.io/projected/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-kube-api-access-w25cw\") pod \"swift-storage-0\" (UID: \"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f\") " pod="openstack/swift-storage-0" Dec 05 01:30:55 crc kubenswrapper[4665]: I1205 01:30:55.811530 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"swift-storage-0\" (UID: \"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f\") " pod="openstack/swift-storage-0" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.087965 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-zlrjx"] Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.089044 4665 util.go:30] "No sandbox for pod can be found. 
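[Editor's note] The nestedpendingoperations error above is the first of a series in this log whose durationBeforeRetry doubles each attempt: 500ms here, then 1s (01:30:56), 2s (01:30:57), 4s (01:30:59), and 8s (01:31:03), until the missing swift-ring-files configmap finally exists. A sketch of that doubling-with-cap pattern (the 2-minute cap is an assumption for illustration; the kubelet's actual constants live in its volume-manager code):

    package main

    import (
    	"fmt"
    	"time"
    )

    // nextBackoff doubles the previous delay and clamps it to maxDelay,
    // mirroring the 500ms -> 1s -> 2s -> 4s -> 8s progression in the log.
    func nextBackoff(prev, maxDelay time.Duration) time.Duration {
    	if prev == 0 {
    		return 500 * time.Millisecond // initial delay seen in the log
    	}
    	next := 2 * prev
    	if next > maxDelay {
    		return maxDelay
    	}
    	return next
    }

    func main() {
    	d := time.Duration(0)
    	for i := 0; i < 10; i++ {
    		d = nextBackoff(d, 2*time.Minute) // cap value is an assumption
    		fmt.Println(d)
    	}
    }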
Need to start a new one" pod="openstack/swift-ring-rebalance-zlrjx" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.092001 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.092031 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.092065 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.142488 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-zlrjx"] Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.160233 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wkmd\" (UniqueName: \"kubernetes.io/projected/796fddd5-4127-4632-8728-406e29348c74-kube-api-access-5wkmd\") pod \"swift-ring-rebalance-zlrjx\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") " pod="openstack/swift-ring-rebalance-zlrjx" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.160646 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/796fddd5-4127-4632-8728-406e29348c74-scripts\") pod \"swift-ring-rebalance-zlrjx\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") " pod="openstack/swift-ring-rebalance-zlrjx" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.160683 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/796fddd5-4127-4632-8728-406e29348c74-dispersionconf\") pod \"swift-ring-rebalance-zlrjx\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") " pod="openstack/swift-ring-rebalance-zlrjx" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.160714 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/796fddd5-4127-4632-8728-406e29348c74-combined-ca-bundle\") pod \"swift-ring-rebalance-zlrjx\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") " pod="openstack/swift-ring-rebalance-zlrjx" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.160760 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/796fddd5-4127-4632-8728-406e29348c74-ring-data-devices\") pod \"swift-ring-rebalance-zlrjx\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") " pod="openstack/swift-ring-rebalance-zlrjx" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.160792 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/796fddd5-4127-4632-8728-406e29348c74-etc-swift\") pod \"swift-ring-rebalance-zlrjx\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") " pod="openstack/swift-ring-rebalance-zlrjx" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.160818 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/796fddd5-4127-4632-8728-406e29348c74-swiftconf\") pod \"swift-ring-rebalance-zlrjx\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") " pod="openstack/swift-ring-rebalance-zlrjx" Dec 05 
01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.262374 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wkmd\" (UniqueName: \"kubernetes.io/projected/796fddd5-4127-4632-8728-406e29348c74-kube-api-access-5wkmd\") pod \"swift-ring-rebalance-zlrjx\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") " pod="openstack/swift-ring-rebalance-zlrjx" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.262438 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/796fddd5-4127-4632-8728-406e29348c74-scripts\") pod \"swift-ring-rebalance-zlrjx\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") " pod="openstack/swift-ring-rebalance-zlrjx" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.262466 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/796fddd5-4127-4632-8728-406e29348c74-dispersionconf\") pod \"swift-ring-rebalance-zlrjx\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") " pod="openstack/swift-ring-rebalance-zlrjx" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.262491 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/796fddd5-4127-4632-8728-406e29348c74-combined-ca-bundle\") pod \"swift-ring-rebalance-zlrjx\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") " pod="openstack/swift-ring-rebalance-zlrjx" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.262529 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/796fddd5-4127-4632-8728-406e29348c74-ring-data-devices\") pod \"swift-ring-rebalance-zlrjx\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") " pod="openstack/swift-ring-rebalance-zlrjx" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.262556 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/796fddd5-4127-4632-8728-406e29348c74-etc-swift\") pod \"swift-ring-rebalance-zlrjx\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") " pod="openstack/swift-ring-rebalance-zlrjx" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.262575 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/796fddd5-4127-4632-8728-406e29348c74-swiftconf\") pod \"swift-ring-rebalance-zlrjx\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") " pod="openstack/swift-ring-rebalance-zlrjx" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.262599 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-etc-swift\") pod \"swift-storage-0\" (UID: \"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f\") " pod="openstack/swift-storage-0" Dec 05 01:30:56 crc kubenswrapper[4665]: E1205 01:30:56.262759 4665 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 01:30:56 crc kubenswrapper[4665]: E1205 01:30:56.262772 4665 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 01:30:56 crc kubenswrapper[4665]: E1205 01:30:56.262816 4665 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/projected/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-etc-swift podName:d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f nodeName:}" failed. No retries permitted until 2025-12-05 01:30:57.262801724 +0000 UTC m=+1232.602194023 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-etc-swift") pod "swift-storage-0" (UID: "d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f") : configmap "swift-ring-files" not found Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.264372 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/796fddd5-4127-4632-8728-406e29348c74-scripts\") pod \"swift-ring-rebalance-zlrjx\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") " pod="openstack/swift-ring-rebalance-zlrjx" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.265068 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/796fddd5-4127-4632-8728-406e29348c74-etc-swift\") pod \"swift-ring-rebalance-zlrjx\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") " pod="openstack/swift-ring-rebalance-zlrjx" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.268611 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/796fddd5-4127-4632-8728-406e29348c74-combined-ca-bundle\") pod \"swift-ring-rebalance-zlrjx\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") " pod="openstack/swift-ring-rebalance-zlrjx" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.268788 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/796fddd5-4127-4632-8728-406e29348c74-dispersionconf\") pod \"swift-ring-rebalance-zlrjx\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") " pod="openstack/swift-ring-rebalance-zlrjx" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.269049 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/796fddd5-4127-4632-8728-406e29348c74-ring-data-devices\") pod \"swift-ring-rebalance-zlrjx\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") " pod="openstack/swift-ring-rebalance-zlrjx" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.275742 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/796fddd5-4127-4632-8728-406e29348c74-swiftconf\") pod \"swift-ring-rebalance-zlrjx\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") " pod="openstack/swift-ring-rebalance-zlrjx" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.288006 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wkmd\" (UniqueName: \"kubernetes.io/projected/796fddd5-4127-4632-8728-406e29348c74-kube-api-access-5wkmd\") pod \"swift-ring-rebalance-zlrjx\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") " pod="openstack/swift-ring-rebalance-zlrjx" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.405256 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-zlrjx" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.754382 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-zpx6p" event={"ID":"b63eb623-410f-4130-bf07-845b294c89f1","Type":"ContainerStarted","Data":"724bd78baf4d982d3d09ab22b8646f289386612dd41914ff591e5e2aec6c09fb"} Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.755150 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-zpx6p" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.795460 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-698758b865-zpx6p" podStartSLOduration=2.795438215 podStartE2EDuration="2.795438215s" podCreationTimestamp="2025-12-05 01:30:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:30:56.789989425 +0000 UTC m=+1232.129381754" watchObservedRunningTime="2025-12-05 01:30:56.795438215 +0000 UTC m=+1232.134830514" Dec 05 01:30:56 crc kubenswrapper[4665]: I1205 01:30:56.842049 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-zlrjx"] Dec 05 01:30:56 crc kubenswrapper[4665]: W1205 01:30:56.846990 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod796fddd5_4127_4632_8728_406e29348c74.slice/crio-c2a8912c4f88296ecce30485414465a624fc085de508c8b30c9ca53d9278a568 WatchSource:0}: Error finding container c2a8912c4f88296ecce30485414465a624fc085de508c8b30c9ca53d9278a568: Status 404 returned error can't find the container with id c2a8912c4f88296ecce30485414465a624fc085de508c8b30c9ca53d9278a568 Dec 05 01:30:57 crc kubenswrapper[4665]: I1205 01:30:57.279619 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-etc-swift\") pod \"swift-storage-0\" (UID: \"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f\") " pod="openstack/swift-storage-0" Dec 05 01:30:57 crc kubenswrapper[4665]: E1205 01:30:57.279845 4665 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 01:30:57 crc kubenswrapper[4665]: E1205 01:30:57.279858 4665 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 01:30:57 crc kubenswrapper[4665]: E1205 01:30:57.279903 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-etc-swift podName:d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f nodeName:}" failed. No retries permitted until 2025-12-05 01:30:59.279887707 +0000 UTC m=+1234.619280006 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-etc-swift") pod "swift-storage-0" (UID: "d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f") : configmap "swift-ring-files" not found Dec 05 01:30:57 crc kubenswrapper[4665]: I1205 01:30:57.345737 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-ng2qn"] Dec 05 01:30:57 crc kubenswrapper[4665]: I1205 01:30:57.347210 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-ng2qn" Dec 05 01:30:57 crc kubenswrapper[4665]: I1205 01:30:57.359418 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-243a-account-create-update-kb9j2"] Dec 05 01:30:57 crc kubenswrapper[4665]: I1205 01:30:57.360675 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-243a-account-create-update-kb9j2" Dec 05 01:30:57 crc kubenswrapper[4665]: I1205 01:30:57.365929 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 05 01:30:57 crc kubenswrapper[4665]: I1205 01:30:57.374104 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-ng2qn"] Dec 05 01:30:57 crc kubenswrapper[4665]: I1205 01:30:57.382439 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kz8gj\" (UniqueName: \"kubernetes.io/projected/fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b-kube-api-access-kz8gj\") pod \"glance-db-create-ng2qn\" (UID: \"fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b\") " pod="openstack/glance-db-create-ng2qn" Dec 05 01:30:57 crc kubenswrapper[4665]: I1205 01:30:57.382906 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b-operator-scripts\") pod \"glance-db-create-ng2qn\" (UID: \"fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b\") " pod="openstack/glance-db-create-ng2qn" Dec 05 01:30:57 crc kubenswrapper[4665]: I1205 01:30:57.389350 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-243a-account-create-update-kb9j2"] Dec 05 01:30:57 crc kubenswrapper[4665]: I1205 01:30:57.484919 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b-operator-scripts\") pod \"glance-db-create-ng2qn\" (UID: \"fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b\") " pod="openstack/glance-db-create-ng2qn" Dec 05 01:30:57 crc kubenswrapper[4665]: I1205 01:30:57.485122 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kz8gj\" (UniqueName: \"kubernetes.io/projected/fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b-kube-api-access-kz8gj\") pod \"glance-db-create-ng2qn\" (UID: \"fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b\") " pod="openstack/glance-db-create-ng2qn" Dec 05 01:30:57 crc kubenswrapper[4665]: I1205 01:30:57.485536 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/82bcac28-3f91-477d-a370-c510d062b2eb-operator-scripts\") pod \"glance-243a-account-create-update-kb9j2\" (UID: \"82bcac28-3f91-477d-a370-c510d062b2eb\") " pod="openstack/glance-243a-account-create-update-kb9j2" Dec 05 01:30:57 crc kubenswrapper[4665]: I1205 01:30:57.485604 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b-operator-scripts\") pod \"glance-db-create-ng2qn\" (UID: \"fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b\") " pod="openstack/glance-db-create-ng2qn" Dec 05 01:30:57 crc kubenswrapper[4665]: I1205 01:30:57.485620 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ts9fz\" (UniqueName: 
\"kubernetes.io/projected/82bcac28-3f91-477d-a370-c510d062b2eb-kube-api-access-ts9fz\") pod \"glance-243a-account-create-update-kb9j2\" (UID: \"82bcac28-3f91-477d-a370-c510d062b2eb\") " pod="openstack/glance-243a-account-create-update-kb9j2" Dec 05 01:30:57 crc kubenswrapper[4665]: I1205 01:30:57.505391 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kz8gj\" (UniqueName: \"kubernetes.io/projected/fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b-kube-api-access-kz8gj\") pod \"glance-db-create-ng2qn\" (UID: \"fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b\") " pod="openstack/glance-db-create-ng2qn" Dec 05 01:30:57 crc kubenswrapper[4665]: I1205 01:30:57.587374 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/82bcac28-3f91-477d-a370-c510d062b2eb-operator-scripts\") pod \"glance-243a-account-create-update-kb9j2\" (UID: \"82bcac28-3f91-477d-a370-c510d062b2eb\") " pod="openstack/glance-243a-account-create-update-kb9j2" Dec 05 01:30:57 crc kubenswrapper[4665]: I1205 01:30:57.587744 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ts9fz\" (UniqueName: \"kubernetes.io/projected/82bcac28-3f91-477d-a370-c510d062b2eb-kube-api-access-ts9fz\") pod \"glance-243a-account-create-update-kb9j2\" (UID: \"82bcac28-3f91-477d-a370-c510d062b2eb\") " pod="openstack/glance-243a-account-create-update-kb9j2" Dec 05 01:30:57 crc kubenswrapper[4665]: I1205 01:30:57.590770 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/82bcac28-3f91-477d-a370-c510d062b2eb-operator-scripts\") pod \"glance-243a-account-create-update-kb9j2\" (UID: \"82bcac28-3f91-477d-a370-c510d062b2eb\") " pod="openstack/glance-243a-account-create-update-kb9j2" Dec 05 01:30:57 crc kubenswrapper[4665]: I1205 01:30:57.616945 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ts9fz\" (UniqueName: \"kubernetes.io/projected/82bcac28-3f91-477d-a370-c510d062b2eb-kube-api-access-ts9fz\") pod \"glance-243a-account-create-update-kb9j2\" (UID: \"82bcac28-3f91-477d-a370-c510d062b2eb\") " pod="openstack/glance-243a-account-create-update-kb9j2" Dec 05 01:30:57 crc kubenswrapper[4665]: I1205 01:30:57.676709 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-ng2qn" Dec 05 01:30:57 crc kubenswrapper[4665]: I1205 01:30:57.689093 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-243a-account-create-update-kb9j2" Dec 05 01:30:57 crc kubenswrapper[4665]: I1205 01:30:57.788368 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-zlrjx" event={"ID":"796fddd5-4127-4632-8728-406e29348c74","Type":"ContainerStarted","Data":"c2a8912c4f88296ecce30485414465a624fc085de508c8b30c9ca53d9278a568"} Dec 05 01:30:58 crc kubenswrapper[4665]: I1205 01:30:58.178041 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-ng2qn"] Dec 05 01:30:58 crc kubenswrapper[4665]: W1205 01:30:58.381977 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod82bcac28_3f91_477d_a370_c510d062b2eb.slice/crio-ba918e0d73f4e0f445d48f433f8935f55989e3edd986af21a3eee10ae606eadc WatchSource:0}: Error finding container ba918e0d73f4e0f445d48f433f8935f55989e3edd986af21a3eee10ae606eadc: Status 404 returned error can't find the container with id ba918e0d73f4e0f445d48f433f8935f55989e3edd986af21a3eee10ae606eadc Dec 05 01:30:58 crc kubenswrapper[4665]: I1205 01:30:58.387729 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-243a-account-create-update-kb9j2"] Dec 05 01:30:58 crc kubenswrapper[4665]: I1205 01:30:58.799794 4665 generic.go:334] "Generic (PLEG): container finished" podID="fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b" containerID="c6c8aa33a13c7d1d9c1ee4c5644b01481286d1bdb55267a2efe5c79f8e33ce5e" exitCode=0 Dec 05 01:30:58 crc kubenswrapper[4665]: I1205 01:30:58.799861 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-ng2qn" event={"ID":"fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b","Type":"ContainerDied","Data":"c6c8aa33a13c7d1d9c1ee4c5644b01481286d1bdb55267a2efe5c79f8e33ce5e"} Dec 05 01:30:58 crc kubenswrapper[4665]: I1205 01:30:58.799890 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-ng2qn" event={"ID":"fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b","Type":"ContainerStarted","Data":"aa105ae408edef23b827b9b7ce1c9a92432ee834a8e724fa961bf86dd23a2e22"} Dec 05 01:30:58 crc kubenswrapper[4665]: I1205 01:30:58.801420 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-243a-account-create-update-kb9j2" event={"ID":"82bcac28-3f91-477d-a370-c510d062b2eb","Type":"ContainerStarted","Data":"ba918e0d73f4e0f445d48f433f8935f55989e3edd986af21a3eee10ae606eadc"} Dec 05 01:30:59 crc kubenswrapper[4665]: I1205 01:30:59.049986 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-zr2m2" podUID="41de0d44-b33f-43c4-a6c1-54830596874b" containerName="ovn-controller" probeResult="failure" output=< Dec 05 01:30:59 crc kubenswrapper[4665]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 05 01:30:59 crc kubenswrapper[4665]: > Dec 05 01:30:59 crc kubenswrapper[4665]: I1205 01:30:59.362121 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-etc-swift\") pod \"swift-storage-0\" (UID: \"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f\") " pod="openstack/swift-storage-0" Dec 05 01:30:59 crc kubenswrapper[4665]: E1205 01:30:59.362266 4665 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 01:30:59 crc kubenswrapper[4665]: E1205 01:30:59.362693 4665 projected.go:194] Error preparing data for projected 
volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 01:30:59 crc kubenswrapper[4665]: E1205 01:30:59.362749 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-etc-swift podName:d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f nodeName:}" failed. No retries permitted until 2025-12-05 01:31:03.362730879 +0000 UTC m=+1238.702123178 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-etc-swift") pod "swift-storage-0" (UID: "d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f") : configmap "swift-ring-files" not found Dec 05 01:30:59 crc kubenswrapper[4665]: I1205 01:30:59.427198 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 05 01:31:01 crc kubenswrapper[4665]: I1205 01:31:01.689410 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-cgc75"] Dec 05 01:31:01 crc kubenswrapper[4665]: I1205 01:31:01.691093 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-cgc75" Dec 05 01:31:01 crc kubenswrapper[4665]: I1205 01:31:01.706683 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-cgc75"] Dec 05 01:31:01 crc kubenswrapper[4665]: I1205 01:31:01.794223 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cc5a-account-create-update-9lwsp"] Dec 05 01:31:01 crc kubenswrapper[4665]: I1205 01:31:01.795979 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cc5a-account-create-update-9lwsp" Dec 05 01:31:01 crc kubenswrapper[4665]: I1205 01:31:01.798061 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret" Dec 05 01:31:01 crc kubenswrapper[4665]: I1205 01:31:01.798930 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42-operator-scripts\") pod \"keystone-db-create-cgc75\" (UID: \"9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42\") " pod="openstack/keystone-db-create-cgc75" Dec 05 01:31:01 crc kubenswrapper[4665]: I1205 01:31:01.799019 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8lcl\" (UniqueName: \"kubernetes.io/projected/9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42-kube-api-access-w8lcl\") pod \"keystone-db-create-cgc75\" (UID: \"9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42\") " pod="openstack/keystone-db-create-cgc75" Dec 05 01:31:01 crc kubenswrapper[4665]: I1205 01:31:01.804777 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cc5a-account-create-update-9lwsp"] Dec 05 01:31:01 crc kubenswrapper[4665]: I1205 01:31:01.900396 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f748e17a-771b-4153-ab91-1e105e392917-operator-scripts\") pod \"keystone-cc5a-account-create-update-9lwsp\" (UID: \"f748e17a-771b-4153-ab91-1e105e392917\") " pod="openstack/keystone-cc5a-account-create-update-9lwsp" Dec 05 01:31:01 crc kubenswrapper[4665]: I1205 01:31:01.905971 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42-operator-scripts\") pod \"keystone-db-create-cgc75\" (UID: \"9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42\") " pod="openstack/keystone-db-create-cgc75" Dec 05 01:31:01 crc kubenswrapper[4665]: I1205 01:31:01.906235 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8lcl\" (UniqueName: \"kubernetes.io/projected/9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42-kube-api-access-w8lcl\") pod \"keystone-db-create-cgc75\" (UID: \"9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42\") " pod="openstack/keystone-db-create-cgc75" Dec 05 01:31:01 crc kubenswrapper[4665]: I1205 01:31:01.906629 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rw9zw\" (UniqueName: \"kubernetes.io/projected/f748e17a-771b-4153-ab91-1e105e392917-kube-api-access-rw9zw\") pod \"keystone-cc5a-account-create-update-9lwsp\" (UID: \"f748e17a-771b-4153-ab91-1e105e392917\") " pod="openstack/keystone-cc5a-account-create-update-9lwsp" Dec 05 01:31:01 crc kubenswrapper[4665]: I1205 01:31:01.907711 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42-operator-scripts\") pod \"keystone-db-create-cgc75\" (UID: \"9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42\") " pod="openstack/keystone-db-create-cgc75" Dec 05 01:31:01 crc kubenswrapper[4665]: I1205 01:31:01.938429 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8lcl\" (UniqueName: \"kubernetes.io/projected/9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42-kube-api-access-w8lcl\") pod \"keystone-db-create-cgc75\" (UID: \"9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42\") " pod="openstack/keystone-db-create-cgc75" Dec 05 01:31:01 crc kubenswrapper[4665]: I1205 01:31:01.997608 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-98hr7"] Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.008030 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rw9zw\" (UniqueName: \"kubernetes.io/projected/f748e17a-771b-4153-ab91-1e105e392917-kube-api-access-rw9zw\") pod \"keystone-cc5a-account-create-update-9lwsp\" (UID: \"f748e17a-771b-4153-ab91-1e105e392917\") " pod="openstack/keystone-cc5a-account-create-update-9lwsp" Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.008147 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f748e17a-771b-4153-ab91-1e105e392917-operator-scripts\") pod \"keystone-cc5a-account-create-update-9lwsp\" (UID: \"f748e17a-771b-4153-ab91-1e105e392917\") " pod="openstack/keystone-cc5a-account-create-update-9lwsp" Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.009072 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f748e17a-771b-4153-ab91-1e105e392917-operator-scripts\") pod \"keystone-cc5a-account-create-update-9lwsp\" (UID: \"f748e17a-771b-4153-ab91-1e105e392917\") " pod="openstack/keystone-cc5a-account-create-update-9lwsp" Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.009783 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-98hr7"] Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.009864 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-98hr7" Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.032437 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rw9zw\" (UniqueName: \"kubernetes.io/projected/f748e17a-771b-4153-ab91-1e105e392917-kube-api-access-rw9zw\") pod \"keystone-cc5a-account-create-update-9lwsp\" (UID: \"f748e17a-771b-4153-ab91-1e105e392917\") " pod="openstack/keystone-cc5a-account-create-update-9lwsp" Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.044841 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-cgc75" Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.108699 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-e5ee-account-create-update-sq82x"] Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.110002 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-e5ee-account-create-update-sq82x" Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.111580 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8q7r\" (UniqueName: \"kubernetes.io/projected/4ef8c440-1a96-47cd-a75a-5d4df00edda5-kube-api-access-m8q7r\") pod \"placement-db-create-98hr7\" (UID: \"4ef8c440-1a96-47cd-a75a-5d4df00edda5\") " pod="openstack/placement-db-create-98hr7" Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.111675 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ef8c440-1a96-47cd-a75a-5d4df00edda5-operator-scripts\") pod \"placement-db-create-98hr7\" (UID: \"4ef8c440-1a96-47cd-a75a-5d4df00edda5\") " pod="openstack/placement-db-create-98hr7" Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.112378 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cc5a-account-create-update-9lwsp" Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.115024 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret" Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.126620 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-e5ee-account-create-update-sq82x"] Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.212736 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8q7r\" (UniqueName: \"kubernetes.io/projected/4ef8c440-1a96-47cd-a75a-5d4df00edda5-kube-api-access-m8q7r\") pod \"placement-db-create-98hr7\" (UID: \"4ef8c440-1a96-47cd-a75a-5d4df00edda5\") " pod="openstack/placement-db-create-98hr7" Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.212800 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a0780418-7936-4123-9a41-b3a7e6b22b9d-operator-scripts\") pod \"placement-e5ee-account-create-update-sq82x\" (UID: \"a0780418-7936-4123-9a41-b3a7e6b22b9d\") " pod="openstack/placement-e5ee-account-create-update-sq82x" Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.212837 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fq8cf\" (UniqueName: \"kubernetes.io/projected/a0780418-7936-4123-9a41-b3a7e6b22b9d-kube-api-access-fq8cf\") pod \"placement-e5ee-account-create-update-sq82x\" (UID: \"a0780418-7936-4123-9a41-b3a7e6b22b9d\") " pod="openstack/placement-e5ee-account-create-update-sq82x" Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.212899 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ef8c440-1a96-47cd-a75a-5d4df00edda5-operator-scripts\") pod \"placement-db-create-98hr7\" (UID: \"4ef8c440-1a96-47cd-a75a-5d4df00edda5\") " pod="openstack/placement-db-create-98hr7" Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.213675 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ef8c440-1a96-47cd-a75a-5d4df00edda5-operator-scripts\") pod \"placement-db-create-98hr7\" (UID: \"4ef8c440-1a96-47cd-a75a-5d4df00edda5\") " pod="openstack/placement-db-create-98hr7" Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.233584 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8q7r\" (UniqueName: \"kubernetes.io/projected/4ef8c440-1a96-47cd-a75a-5d4df00edda5-kube-api-access-m8q7r\") pod \"placement-db-create-98hr7\" (UID: \"4ef8c440-1a96-47cd-a75a-5d4df00edda5\") " pod="openstack/placement-db-create-98hr7" Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.315417 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a0780418-7936-4123-9a41-b3a7e6b22b9d-operator-scripts\") pod \"placement-e5ee-account-create-update-sq82x\" (UID: \"a0780418-7936-4123-9a41-b3a7e6b22b9d\") " pod="openstack/placement-e5ee-account-create-update-sq82x" Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.315948 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fq8cf\" (UniqueName: 
\"kubernetes.io/projected/a0780418-7936-4123-9a41-b3a7e6b22b9d-kube-api-access-fq8cf\") pod \"placement-e5ee-account-create-update-sq82x\" (UID: \"a0780418-7936-4123-9a41-b3a7e6b22b9d\") " pod="openstack/placement-e5ee-account-create-update-sq82x" Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.316731 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a0780418-7936-4123-9a41-b3a7e6b22b9d-operator-scripts\") pod \"placement-e5ee-account-create-update-sq82x\" (UID: \"a0780418-7936-4123-9a41-b3a7e6b22b9d\") " pod="openstack/placement-e5ee-account-create-update-sq82x" Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.327737 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-98hr7" Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.332733 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fq8cf\" (UniqueName: \"kubernetes.io/projected/a0780418-7936-4123-9a41-b3a7e6b22b9d-kube-api-access-fq8cf\") pod \"placement-e5ee-account-create-update-sq82x\" (UID: \"a0780418-7936-4123-9a41-b3a7e6b22b9d\") " pod="openstack/placement-e5ee-account-create-update-sq82x" Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.431716 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-e5ee-account-create-update-sq82x" Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.856671 4665 generic.go:334] "Generic (PLEG): container finished" podID="753728b2-97f7-4b79-8daf-19e01260d537" containerID="ad6c34d45f2e5957d56a68ee1781bf4ac2c85caaf4bff585d01373807dfb5bed" exitCode=0 Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.856759 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"753728b2-97f7-4b79-8daf-19e01260d537","Type":"ContainerDied","Data":"ad6c34d45f2e5957d56a68ee1781bf4ac2c85caaf4bff585d01373807dfb5bed"} Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.870247 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-ng2qn" event={"ID":"fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b","Type":"ContainerDied","Data":"aa105ae408edef23b827b9b7ce1c9a92432ee834a8e724fa961bf86dd23a2e22"} Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.870306 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aa105ae408edef23b827b9b7ce1c9a92432ee834a8e724fa961bf86dd23a2e22" Dec 05 01:31:02 crc kubenswrapper[4665]: I1205 01:31:02.871227 4665 util.go:48] "No ready sandbox for pod can be found. 
Dec 05 01:31:03 crc kubenswrapper[4665]: I1205 01:31:03.029211 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kz8gj\" (UniqueName: \"kubernetes.io/projected/fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b-kube-api-access-kz8gj\") pod \"fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b\" (UID: \"fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b\") "
Dec 05 01:31:03 crc kubenswrapper[4665]: I1205 01:31:03.029760 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b-operator-scripts\") pod \"fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b\" (UID: \"fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b\") "
Dec 05 01:31:03 crc kubenswrapper[4665]: I1205 01:31:03.030545 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b" (UID: "fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:31:03 crc kubenswrapper[4665]: I1205 01:31:03.038373 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b-kube-api-access-kz8gj" (OuterVolumeSpecName: "kube-api-access-kz8gj") pod "fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b" (UID: "fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b"). InnerVolumeSpecName "kube-api-access-kz8gj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:31:03 crc kubenswrapper[4665]: I1205 01:31:03.131200 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kz8gj\" (UniqueName: \"kubernetes.io/projected/fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b-kube-api-access-kz8gj\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:03 crc kubenswrapper[4665]: I1205 01:31:03.131664 4665 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:03 crc kubenswrapper[4665]: I1205 01:31:03.330231 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-cgc75"]
Dec 05 01:31:03 crc kubenswrapper[4665]: I1205 01:31:03.436568 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-etc-swift\") pod \"swift-storage-0\" (UID: \"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f\") " pod="openstack/swift-storage-0"
Dec 05 01:31:03 crc kubenswrapper[4665]: E1205 01:31:03.436793 4665 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found
Dec 05 01:31:03 crc kubenswrapper[4665]: E1205 01:31:03.436812 4665 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found
Dec 05 01:31:03 crc kubenswrapper[4665]: E1205 01:31:03.436866 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-etc-swift podName:d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f nodeName:}" failed. No retries permitted until 2025-12-05 01:31:11.436848402 +0000 UTC m=+1246.776240711 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-etc-swift") pod "swift-storage-0" (UID: "d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f") : configmap "swift-ring-files" not found
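The "durationBeforeRetry 8s" above is exponential backoff on a mount that keeps failing: each retry roughly doubles the wait until a cap is reached. A sketch of that shape, assuming a 500ms initial delay, a factor of 2, and a ~2m cap (plausible defaults; the exact constants depend on the kubelet version):

package main

import (
	"fmt"
	"time"
)

// backoff returns the illustrative delay before retry n of a failing
// operation: initial * 2^(n-1), clamped to a maximum.
func backoff(n int) time.Duration {
	const (
		initial = 500 * time.Millisecond // assumed initial delay
		max     = 2 * time.Minute        // assumed cap
	)
	d := initial
	for i := 1; i < n; i++ {
		d *= 2
		if d > max {
			return max
		}
	}
	return d
}

func main() {
	for n := 1; n <= 5; n++ {
		fmt.Printf("failure %d -> retry in %v\n", n, backoff(n))
	}
	// failure 5 -> retry in 8s, matching durationBeforeRetry above.
}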
Dec 05 01:31:03 crc kubenswrapper[4665]: W1205 01:31:03.439961 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9f5c28dd_5ab6_4dd1_ad3e_ea0858296e42.slice/crio-99c1fcffb850df2f21cae1449c179928c578d188e4cd608bd1c141ccd9bee4c0 WatchSource:0}: Error finding container 99c1fcffb850df2f21cae1449c179928c578d188e4cd608bd1c141ccd9bee4c0: Status 404 returned error can't find the container with id 99c1fcffb850df2f21cae1449c179928c578d188e4cd608bd1c141ccd9bee4c0
Dec 05 01:31:03 crc kubenswrapper[4665]: I1205 01:31:03.878645 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-cgc75" event={"ID":"9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42","Type":"ContainerStarted","Data":"6be3f8e25d1ee34087b84793d71b5c0d62eec4714cfcfcd770178ddedfc81719"}
Dec 05 01:31:03 crc kubenswrapper[4665]: I1205 01:31:03.878994 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-cgc75" event={"ID":"9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42","Type":"ContainerStarted","Data":"99c1fcffb850df2f21cae1449c179928c578d188e4cd608bd1c141ccd9bee4c0"}
Dec 05 01:31:03 crc kubenswrapper[4665]: I1205 01:31:03.887643 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"753728b2-97f7-4b79-8daf-19e01260d537","Type":"ContainerStarted","Data":"c23f9c9b6e782326f69fb576fa5502c1aba5fd30370fbdb03269d7d70a308724"}
Dec 05 01:31:03 crc kubenswrapper[4665]: I1205 01:31:03.888175 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0"
Dec 05 01:31:03 crc kubenswrapper[4665]: I1205 01:31:03.889670 4665 generic.go:334] "Generic (PLEG): container finished" podID="82bcac28-3f91-477d-a370-c510d062b2eb" containerID="dad5949dfd12b600add8c3b8f40d5ec60a1540da6bc6dc017c526d1c163ee2c6" exitCode=0
Dec 05 01:31:03 crc kubenswrapper[4665]: I1205 01:31:03.889786 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-243a-account-create-update-kb9j2" event={"ID":"82bcac28-3f91-477d-a370-c510d062b2eb","Type":"ContainerDied","Data":"dad5949dfd12b600add8c3b8f40d5ec60a1540da6bc6dc017c526d1c163ee2c6"}
Dec 05 01:31:03 crc kubenswrapper[4665]: I1205 01:31:03.891186 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-ng2qn"
Dec 05 01:31:03 crc kubenswrapper[4665]: I1205 01:31:03.891472 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-zlrjx" event={"ID":"796fddd5-4127-4632-8728-406e29348c74","Type":"ContainerStarted","Data":"a2a1df8da0282bb2bc6e31356fc714a9ed214cc28e0b22a272f9a55fac5db082"}
Dec 05 01:31:03 crc kubenswrapper[4665]: I1205 01:31:03.951092 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-create-cgc75" podStartSLOduration=2.951075561 podStartE2EDuration="2.951075561s" podCreationTimestamp="2025-12-05 01:31:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:31:03.947778113 +0000 UTC m=+1239.287170412" watchObservedRunningTime="2025-12-05 01:31:03.951075561 +0000 UTC m=+1239.290467860"
Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:03.997700 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.250154303 podStartE2EDuration="1m16.996279297s" podCreationTimestamp="2025-12-05 01:29:47 +0000 UTC" firstStartedPulling="2025-12-05 01:29:49.725106361 +0000 UTC m=+1165.064498670" lastFinishedPulling="2025-12-05 01:30:29.471231365 +0000 UTC m=+1204.810623664" observedRunningTime="2025-12-05 01:31:03.983969801 +0000 UTC m=+1239.323362130" watchObservedRunningTime="2025-12-05 01:31:03.996279297 +0000 UTC m=+1239.335671596"
Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.033316 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-98hr7"]
Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.064690 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-zlrjx" podStartSLOduration=1.40871153 podStartE2EDuration="8.064665617s" podCreationTimestamp="2025-12-05 01:30:56 +0000 UTC" firstStartedPulling="2025-12-05 01:30:56.848889157 +0000 UTC m=+1232.188281466" lastFinishedPulling="2025-12-05 01:31:03.504843254 +0000 UTC m=+1238.844235553" observedRunningTime="2025-12-05 01:31:04.03558523 +0000 UTC m=+1239.374977529" watchObservedRunningTime="2025-12-05 01:31:04.064665617 +0000 UTC m=+1239.404057916"
Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.070169 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-zr2m2" podUID="41de0d44-b33f-43c4-a6c1-54830596874b" containerName="ovn-controller" probeResult="failure" output=<
Dec 05 01:31:04 crc kubenswrapper[4665]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status
Dec 05 01:31:04 crc kubenswrapper[4665]: >
Dec 05 01:31:04 crc kubenswrapper[4665]: W1205 01:31:04.076413 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf748e17a_771b_4153_ab91_1e105e392917.slice/crio-bffc6eac03a5d5206344b77f3fb3d8ecf61b42969c3379d2feeea76f09a30900 WatchSource:0}: Error finding container bffc6eac03a5d5206344b77f3fb3d8ecf61b42969c3379d2feeea76f09a30900: Status 404 returned error can't find the container with id bffc6eac03a5d5206344b77f3fb3d8ecf61b42969c3379d2feeea76f09a30900
Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.096039 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cc5a-account-create-update-9lwsp"]
Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.106911 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-e5ee-account-create-update-sq82x"]
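The rabbitmq-server-0 record above carries two durations because podStartSLOduration excludes time spent pulling images: it is the E2E duration (creation to observed running) minus the pull window. Reproducing that arithmetic from the timestamps in the record (the tracker's exact sampling points may differ by a few milliseconds):

package main

import (
	"fmt"
	"time"
)

const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

func mustParse(s string) time.Time {
	t, err := time.Parse(layout, s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	// Timestamps copied from the rabbitmq-server-0 record above.
	created := mustParse("2025-12-05 01:29:47 +0000 UTC")
	firstPull := mustParse("2025-12-05 01:29:49.725106361 +0000 UTC")
	lastPull := mustParse("2025-12-05 01:30:29.471231365 +0000 UTC")
	running := mustParse("2025-12-05 01:31:03.983969801 +0000 UTC")

	e2e := running.Sub(created)          // ~1m17s, the E2E duration
	slo := e2e - lastPull.Sub(firstPull) // minus the image-pull window: ~37.2s
	fmt.Println(e2e, slo)
}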
Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.175780 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-7xcgj"
Dec 05 01:31:04 crc kubenswrapper[4665]: W1205 01:31:04.177103 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda0780418_7936_4123_9a41_b3a7e6b22b9d.slice/crio-007f98156b80e0a08e724e1f0f337b7363ab5c3b6eb1c0f43fc43eb989c403af WatchSource:0}: Error finding container 007f98156b80e0a08e724e1f0f337b7363ab5c3b6eb1c0f43fc43eb989c403af: Status 404 returned error can't find the container with id 007f98156b80e0a08e724e1f0f337b7363ab5c3b6eb1c0f43fc43eb989c403af
Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.181544 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-7xcgj"
Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.391461 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-zr2m2-config-5jt2n"]
Dec 05 01:31:04 crc kubenswrapper[4665]: E1205 01:31:04.391807 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b" containerName="mariadb-database-create"
Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.391823 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b" containerName="mariadb-database-create"
Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.391988 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b" containerName="mariadb-database-create"
Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.392603 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-zr2m2-config-5jt2n" Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.395657 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.439265 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-zr2m2-config-5jt2n"] Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.460003 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s2wnv\" (UniqueName: \"kubernetes.io/projected/42cffe78-bc9d-41bf-8d00-c84999388614-kube-api-access-s2wnv\") pod \"ovn-controller-zr2m2-config-5jt2n\" (UID: \"42cffe78-bc9d-41bf-8d00-c84999388614\") " pod="openstack/ovn-controller-zr2m2-config-5jt2n" Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.460372 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/42cffe78-bc9d-41bf-8d00-c84999388614-scripts\") pod \"ovn-controller-zr2m2-config-5jt2n\" (UID: \"42cffe78-bc9d-41bf-8d00-c84999388614\") " pod="openstack/ovn-controller-zr2m2-config-5jt2n" Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.460464 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/42cffe78-bc9d-41bf-8d00-c84999388614-var-run\") pod \"ovn-controller-zr2m2-config-5jt2n\" (UID: \"42cffe78-bc9d-41bf-8d00-c84999388614\") " pod="openstack/ovn-controller-zr2m2-config-5jt2n" Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.460540 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/42cffe78-bc9d-41bf-8d00-c84999388614-var-log-ovn\") pod \"ovn-controller-zr2m2-config-5jt2n\" (UID: \"42cffe78-bc9d-41bf-8d00-c84999388614\") " pod="openstack/ovn-controller-zr2m2-config-5jt2n" Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.460652 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/42cffe78-bc9d-41bf-8d00-c84999388614-additional-scripts\") pod \"ovn-controller-zr2m2-config-5jt2n\" (UID: \"42cffe78-bc9d-41bf-8d00-c84999388614\") " pod="openstack/ovn-controller-zr2m2-config-5jt2n" Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.460738 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/42cffe78-bc9d-41bf-8d00-c84999388614-var-run-ovn\") pod \"ovn-controller-zr2m2-config-5jt2n\" (UID: \"42cffe78-bc9d-41bf-8d00-c84999388614\") " pod="openstack/ovn-controller-zr2m2-config-5jt2n" Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.562127 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/42cffe78-bc9d-41bf-8d00-c84999388614-additional-scripts\") pod \"ovn-controller-zr2m2-config-5jt2n\" (UID: \"42cffe78-bc9d-41bf-8d00-c84999388614\") " pod="openstack/ovn-controller-zr2m2-config-5jt2n" Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.562514 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/42cffe78-bc9d-41bf-8d00-c84999388614-var-run-ovn\") pod \"ovn-controller-zr2m2-config-5jt2n\" (UID: \"42cffe78-bc9d-41bf-8d00-c84999388614\") " pod="openstack/ovn-controller-zr2m2-config-5jt2n" Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.562579 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2wnv\" (UniqueName: \"kubernetes.io/projected/42cffe78-bc9d-41bf-8d00-c84999388614-kube-api-access-s2wnv\") pod \"ovn-controller-zr2m2-config-5jt2n\" (UID: \"42cffe78-bc9d-41bf-8d00-c84999388614\") " pod="openstack/ovn-controller-zr2m2-config-5jt2n" Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.562652 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/42cffe78-bc9d-41bf-8d00-c84999388614-scripts\") pod \"ovn-controller-zr2m2-config-5jt2n\" (UID: \"42cffe78-bc9d-41bf-8d00-c84999388614\") " pod="openstack/ovn-controller-zr2m2-config-5jt2n" Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.562677 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/42cffe78-bc9d-41bf-8d00-c84999388614-var-run\") pod \"ovn-controller-zr2m2-config-5jt2n\" (UID: \"42cffe78-bc9d-41bf-8d00-c84999388614\") " pod="openstack/ovn-controller-zr2m2-config-5jt2n" Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.562694 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/42cffe78-bc9d-41bf-8d00-c84999388614-var-log-ovn\") pod \"ovn-controller-zr2m2-config-5jt2n\" (UID: \"42cffe78-bc9d-41bf-8d00-c84999388614\") " pod="openstack/ovn-controller-zr2m2-config-5jt2n" Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.562876 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/42cffe78-bc9d-41bf-8d00-c84999388614-var-run\") pod \"ovn-controller-zr2m2-config-5jt2n\" (UID: \"42cffe78-bc9d-41bf-8d00-c84999388614\") " pod="openstack/ovn-controller-zr2m2-config-5jt2n" Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.562935 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/42cffe78-bc9d-41bf-8d00-c84999388614-var-log-ovn\") pod \"ovn-controller-zr2m2-config-5jt2n\" (UID: \"42cffe78-bc9d-41bf-8d00-c84999388614\") " pod="openstack/ovn-controller-zr2m2-config-5jt2n" Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.563138 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/42cffe78-bc9d-41bf-8d00-c84999388614-var-run-ovn\") pod \"ovn-controller-zr2m2-config-5jt2n\" (UID: \"42cffe78-bc9d-41bf-8d00-c84999388614\") " pod="openstack/ovn-controller-zr2m2-config-5jt2n" Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.564773 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/42cffe78-bc9d-41bf-8d00-c84999388614-scripts\") pod \"ovn-controller-zr2m2-config-5jt2n\" (UID: \"42cffe78-bc9d-41bf-8d00-c84999388614\") " pod="openstack/ovn-controller-zr2m2-config-5jt2n" Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.565185 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: 
\"kubernetes.io/configmap/42cffe78-bc9d-41bf-8d00-c84999388614-additional-scripts\") pod \"ovn-controller-zr2m2-config-5jt2n\" (UID: \"42cffe78-bc9d-41bf-8d00-c84999388614\") " pod="openstack/ovn-controller-zr2m2-config-5jt2n" Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.581499 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2wnv\" (UniqueName: \"kubernetes.io/projected/42cffe78-bc9d-41bf-8d00-c84999388614-kube-api-access-s2wnv\") pod \"ovn-controller-zr2m2-config-5jt2n\" (UID: \"42cffe78-bc9d-41bf-8d00-c84999388614\") " pod="openstack/ovn-controller-zr2m2-config-5jt2n" Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.671458 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-698758b865-zpx6p" Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.707031 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-zr2m2-config-5jt2n" Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.737945 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-wkgb5"] Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.744486 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5" podUID="e8114fb6-6378-4fca-969d-c611a6c5f330" containerName="dnsmasq-dns" containerID="cri-o://0476a292ff7105b4c65527a995b5ca1c9d969134367890a7de319069f8242c60" gracePeriod=10 Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.915628 4665 generic.go:334] "Generic (PLEG): container finished" podID="e8114fb6-6378-4fca-969d-c611a6c5f330" containerID="0476a292ff7105b4c65527a995b5ca1c9d969134367890a7de319069f8242c60" exitCode=0 Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.915705 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5" event={"ID":"e8114fb6-6378-4fca-969d-c611a6c5f330","Type":"ContainerDied","Data":"0476a292ff7105b4c65527a995b5ca1c9d969134367890a7de319069f8242c60"} Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.927964 4665 generic.go:334] "Generic (PLEG): container finished" podID="a0780418-7936-4123-9a41-b3a7e6b22b9d" containerID="b1a2fc5429305e2d61757fbc7f663c8981a73ba2ac8c1c95d2fce208b8282b60" exitCode=0 Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.928381 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-e5ee-account-create-update-sq82x" event={"ID":"a0780418-7936-4123-9a41-b3a7e6b22b9d","Type":"ContainerDied","Data":"b1a2fc5429305e2d61757fbc7f663c8981a73ba2ac8c1c95d2fce208b8282b60"} Dec 05 01:31:04 crc kubenswrapper[4665]: I1205 01:31:04.928406 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-e5ee-account-create-update-sq82x" event={"ID":"a0780418-7936-4123-9a41-b3a7e6b22b9d","Type":"ContainerStarted","Data":"007f98156b80e0a08e724e1f0f337b7363ab5c3b6eb1c0f43fc43eb989c403af"} Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.007434 4665 generic.go:334] "Generic (PLEG): container finished" podID="9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42" containerID="6be3f8e25d1ee34087b84793d71b5c0d62eec4714cfcfcd770178ddedfc81719" exitCode=0 Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.007926 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-cgc75" 
event={"ID":"9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42","Type":"ContainerDied","Data":"6be3f8e25d1ee34087b84793d71b5c0d62eec4714cfcfcd770178ddedfc81719"}
Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.025003 4665 generic.go:334] "Generic (PLEG): container finished" podID="f748e17a-771b-4153-ab91-1e105e392917" containerID="6268d8dbe2a6c32963cc62d42188e3ae89f908889a5513789cde856509c9a0fd" exitCode=0
Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.025256 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cc5a-account-create-update-9lwsp" event={"ID":"f748e17a-771b-4153-ab91-1e105e392917","Type":"ContainerDied","Data":"6268d8dbe2a6c32963cc62d42188e3ae89f908889a5513789cde856509c9a0fd"}
Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.025354 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cc5a-account-create-update-9lwsp" event={"ID":"f748e17a-771b-4153-ab91-1e105e392917","Type":"ContainerStarted","Data":"bffc6eac03a5d5206344b77f3fb3d8ecf61b42969c3379d2feeea76f09a30900"}
Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.045486 4665 generic.go:334] "Generic (PLEG): container finished" podID="4ef8c440-1a96-47cd-a75a-5d4df00edda5" containerID="f0423d69eaf37e2dc2e226b2cbf451099aaab4c25212917156baad5df253a8dc" exitCode=0
Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.045701 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-98hr7" event={"ID":"4ef8c440-1a96-47cd-a75a-5d4df00edda5","Type":"ContainerDied","Data":"f0423d69eaf37e2dc2e226b2cbf451099aaab4c25212917156baad5df253a8dc"}
Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.045723 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-98hr7" event={"ID":"4ef8c440-1a96-47cd-a75a-5d4df00edda5","Type":"ContainerStarted","Data":"4e9c083f6bd693e794a2fb1ce85092118ee5e8f6fa5973386101c38cc336ec70"}
Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.388155 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-zr2m2-config-5jt2n"]
Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.729771 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5"
Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.738216 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-243a-account-create-update-kb9j2"
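Each PLEG pair above is a low-level "container finished" observation followed by a typed lifecycle event, {ID, Type, Data}, that the sync loop dispatches on; a ContainerDied event triggers a pod resync (status update, volume teardown, restart-policy decision). A minimal sketch of that event shape and dispatch, using illustrative types rather than the kubelet's own:

package main

import "fmt"

// plegEvent mirrors the {ID, Type, Data} shape printed in the log:
// ID is the pod UID and Data the container (or sandbox) ID.
type plegEvent struct {
	ID, Type, Data string
}

func handle(ev plegEvent) {
	switch ev.Type {
	case "ContainerDied":
		// A died container forces a pod sync: update status,
		// tear down volumes, and apply the restart policy.
		fmt.Printf("pod %s: container %s died, resyncing\n", ev.ID, ev.Data)
	case "ContainerStarted":
		fmt.Printf("pod %s: container %s started\n", ev.ID, ev.Data)
	}
}

func main() {
	handle(plegEvent{
		ID:   "4ef8c440-1a96-47cd-a75a-5d4df00edda5",
		Type: "ContainerDied",
		Data: "f0423d69eaf37e2dc2e226b2cbf451099aaab4c25212917156baad5df253a8dc",
	})
}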
Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.804333 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4759l\" (UniqueName: \"kubernetes.io/projected/e8114fb6-6378-4fca-969d-c611a6c5f330-kube-api-access-4759l\") pod \"e8114fb6-6378-4fca-969d-c611a6c5f330\" (UID: \"e8114fb6-6378-4fca-969d-c611a6c5f330\") "
Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.804418 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e8114fb6-6378-4fca-969d-c611a6c5f330-ovsdbserver-sb\") pod \"e8114fb6-6378-4fca-969d-c611a6c5f330\" (UID: \"e8114fb6-6378-4fca-969d-c611a6c5f330\") "
Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.804451 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8114fb6-6378-4fca-969d-c611a6c5f330-config\") pod \"e8114fb6-6378-4fca-969d-c611a6c5f330\" (UID: \"e8114fb6-6378-4fca-969d-c611a6c5f330\") "
Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.804468 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e8114fb6-6378-4fca-969d-c611a6c5f330-ovsdbserver-nb\") pod \"e8114fb6-6378-4fca-969d-c611a6c5f330\" (UID: \"e8114fb6-6378-4fca-969d-c611a6c5f330\") "
Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.804560 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8114fb6-6378-4fca-969d-c611a6c5f330-dns-svc\") pod \"e8114fb6-6378-4fca-969d-c611a6c5f330\" (UID: \"e8114fb6-6378-4fca-969d-c611a6c5f330\") "
Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.804608 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ts9fz\" (UniqueName: \"kubernetes.io/projected/82bcac28-3f91-477d-a370-c510d062b2eb-kube-api-access-ts9fz\") pod \"82bcac28-3f91-477d-a370-c510d062b2eb\" (UID: \"82bcac28-3f91-477d-a370-c510d062b2eb\") "
Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.804700 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/82bcac28-3f91-477d-a370-c510d062b2eb-operator-scripts\") pod \"82bcac28-3f91-477d-a370-c510d062b2eb\" (UID: \"82bcac28-3f91-477d-a370-c510d062b2eb\") "
Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.805671 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82bcac28-3f91-477d-a370-c510d062b2eb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "82bcac28-3f91-477d-a370-c510d062b2eb" (UID: "82bcac28-3f91-477d-a370-c510d062b2eb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.809980 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8114fb6-6378-4fca-969d-c611a6c5f330-kube-api-access-4759l" (OuterVolumeSpecName: "kube-api-access-4759l") pod "e8114fb6-6378-4fca-969d-c611a6c5f330" (UID: "e8114fb6-6378-4fca-969d-c611a6c5f330"). InnerVolumeSpecName "kube-api-access-4759l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.819477 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82bcac28-3f91-477d-a370-c510d062b2eb-kube-api-access-ts9fz" (OuterVolumeSpecName: "kube-api-access-ts9fz") pod "82bcac28-3f91-477d-a370-c510d062b2eb" (UID: "82bcac28-3f91-477d-a370-c510d062b2eb"). InnerVolumeSpecName "kube-api-access-ts9fz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.868855 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8114fb6-6378-4fca-969d-c611a6c5f330-config" (OuterVolumeSpecName: "config") pod "e8114fb6-6378-4fca-969d-c611a6c5f330" (UID: "e8114fb6-6378-4fca-969d-c611a6c5f330"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.879188 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8114fb6-6378-4fca-969d-c611a6c5f330-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "e8114fb6-6378-4fca-969d-c611a6c5f330" (UID: "e8114fb6-6378-4fca-969d-c611a6c5f330"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.906354 4665 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/e8114fb6-6378-4fca-969d-c611a6c5f330-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.906389 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ts9fz\" (UniqueName: \"kubernetes.io/projected/82bcac28-3f91-477d-a370-c510d062b2eb-kube-api-access-ts9fz\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.906405 4665 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/82bcac28-3f91-477d-a370-c510d062b2eb-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.906416 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4759l\" (UniqueName: \"kubernetes.io/projected/e8114fb6-6378-4fca-969d-c611a6c5f330-kube-api-access-4759l\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.906427 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8114fb6-6378-4fca-969d-c611a6c5f330-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.906864 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8114fb6-6378-4fca-969d-c611a6c5f330-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "e8114fb6-6378-4fca-969d-c611a6c5f330" (UID: "e8114fb6-6378-4fca-969d-c611a6c5f330"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:31:05 crc kubenswrapper[4665]: I1205 01:31:05.912687 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8114fb6-6378-4fca-969d-c611a6c5f330-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "e8114fb6-6378-4fca-969d-c611a6c5f330" (UID: "e8114fb6-6378-4fca-969d-c611a6c5f330"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.007788 4665 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/e8114fb6-6378-4fca-969d-c611a6c5f330-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.007831 4665 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/e8114fb6-6378-4fca-969d-c611a6c5f330-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.055495 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-243a-account-create-update-kb9j2" event={"ID":"82bcac28-3f91-477d-a370-c510d062b2eb","Type":"ContainerDied","Data":"ba918e0d73f4e0f445d48f433f8935f55989e3edd986af21a3eee10ae606eadc"} Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.055586 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ba918e0d73f4e0f445d48f433f8935f55989e3edd986af21a3eee10ae606eadc" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.055797 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-243a-account-create-update-kb9j2" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.057041 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-zr2m2-config-5jt2n" event={"ID":"42cffe78-bc9d-41bf-8d00-c84999388614","Type":"ContainerStarted","Data":"5cfd0f19363a033011ee0af06b0b2d2598450d17fb3061a7261801926e54e0d0"} Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.059694 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.060698 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-86db49b7ff-wkgb5" event={"ID":"e8114fb6-6378-4fca-969d-c611a6c5f330","Type":"ContainerDied","Data":"90ce53a3d244c810b4808524a42c15bbd68c80c3644a0b46e2fc0e26101ea5bc"} Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.060752 4665 scope.go:117] "RemoveContainer" containerID="0476a292ff7105b4c65527a995b5ca1c9d969134367890a7de319069f8242c60" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.118507 4665 scope.go:117] "RemoveContainer" containerID="a3a97311cfddd4e5c9d954502f159b1d28c3dd01c0f51d640ae0f244f3a9ad5b" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.148170 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-wkgb5"] Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.148241 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-86db49b7ff-wkgb5"] Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.527664 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-e5ee-account-create-update-sq82x" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.590122 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-cgc75" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.617221 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-create-98hr7" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.628065 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fq8cf\" (UniqueName: \"kubernetes.io/projected/a0780418-7936-4123-9a41-b3a7e6b22b9d-kube-api-access-fq8cf\") pod \"a0780418-7936-4123-9a41-b3a7e6b22b9d\" (UID: \"a0780418-7936-4123-9a41-b3a7e6b22b9d\") " Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.628171 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a0780418-7936-4123-9a41-b3a7e6b22b9d-operator-scripts\") pod \"a0780418-7936-4123-9a41-b3a7e6b22b9d\" (UID: \"a0780418-7936-4123-9a41-b3a7e6b22b9d\") " Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.628242 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42-operator-scripts\") pod \"9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42\" (UID: \"9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42\") " Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.628334 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w8lcl\" (UniqueName: \"kubernetes.io/projected/9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42-kube-api-access-w8lcl\") pod \"9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42\" (UID: \"9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42\") " Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.636750 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a0780418-7936-4123-9a41-b3a7e6b22b9d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a0780418-7936-4123-9a41-b3a7e6b22b9d" (UID: "a0780418-7936-4123-9a41-b3a7e6b22b9d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.642633 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42-kube-api-access-w8lcl" (OuterVolumeSpecName: "kube-api-access-w8lcl") pod "9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42" (UID: "9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42"). InnerVolumeSpecName "kube-api-access-w8lcl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.651669 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42" (UID: "9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.651949 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0780418-7936-4123-9a41-b3a7e6b22b9d-kube-api-access-fq8cf" (OuterVolumeSpecName: "kube-api-access-fq8cf") pod "a0780418-7936-4123-9a41-b3a7e6b22b9d" (UID: "a0780418-7936-4123-9a41-b3a7e6b22b9d"). InnerVolumeSpecName "kube-api-access-fq8cf". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.729866 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ef8c440-1a96-47cd-a75a-5d4df00edda5-operator-scripts\") pod \"4ef8c440-1a96-47cd-a75a-5d4df00edda5\" (UID: \"4ef8c440-1a96-47cd-a75a-5d4df00edda5\") " Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.729945 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m8q7r\" (UniqueName: \"kubernetes.io/projected/4ef8c440-1a96-47cd-a75a-5d4df00edda5-kube-api-access-m8q7r\") pod \"4ef8c440-1a96-47cd-a75a-5d4df00edda5\" (UID: \"4ef8c440-1a96-47cd-a75a-5d4df00edda5\") " Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.730431 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fq8cf\" (UniqueName: \"kubernetes.io/projected/a0780418-7936-4123-9a41-b3a7e6b22b9d-kube-api-access-fq8cf\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.730451 4665 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a0780418-7936-4123-9a41-b3a7e6b22b9d-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.730460 4665 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.730470 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w8lcl\" (UniqueName: \"kubernetes.io/projected/9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42-kube-api-access-w8lcl\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.731531 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4ef8c440-1a96-47cd-a75a-5d4df00edda5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4ef8c440-1a96-47cd-a75a-5d4df00edda5" (UID: "4ef8c440-1a96-47cd-a75a-5d4df00edda5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.734398 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cc5a-account-create-update-9lwsp" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.735333 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ef8c440-1a96-47cd-a75a-5d4df00edda5-kube-api-access-m8q7r" (OuterVolumeSpecName: "kube-api-access-m8q7r") pod "4ef8c440-1a96-47cd-a75a-5d4df00edda5" (UID: "4ef8c440-1a96-47cd-a75a-5d4df00edda5"). InnerVolumeSpecName "kube-api-access-m8q7r". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.831320 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rw9zw\" (UniqueName: \"kubernetes.io/projected/f748e17a-771b-4153-ab91-1e105e392917-kube-api-access-rw9zw\") pod \"f748e17a-771b-4153-ab91-1e105e392917\" (UID: \"f748e17a-771b-4153-ab91-1e105e392917\") " Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.831804 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f748e17a-771b-4153-ab91-1e105e392917-operator-scripts\") pod \"f748e17a-771b-4153-ab91-1e105e392917\" (UID: \"f748e17a-771b-4153-ab91-1e105e392917\") " Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.832227 4665 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4ef8c440-1a96-47cd-a75a-5d4df00edda5-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.832243 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m8q7r\" (UniqueName: \"kubernetes.io/projected/4ef8c440-1a96-47cd-a75a-5d4df00edda5-kube-api-access-m8q7r\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.832675 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f748e17a-771b-4153-ab91-1e105e392917-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f748e17a-771b-4153-ab91-1e105e392917" (UID: "f748e17a-771b-4153-ab91-1e105e392917"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.835548 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f748e17a-771b-4153-ab91-1e105e392917-kube-api-access-rw9zw" (OuterVolumeSpecName: "kube-api-access-rw9zw") pod "f748e17a-771b-4153-ab91-1e105e392917" (UID: "f748e17a-771b-4153-ab91-1e105e392917"). InnerVolumeSpecName "kube-api-access-rw9zw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.901948 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8114fb6-6378-4fca-969d-c611a6c5f330" path="/var/lib/kubelet/pods/e8114fb6-6378-4fca-969d-c611a6c5f330/volumes" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.933598 4665 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f748e17a-771b-4153-ab91-1e105e392917-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:06 crc kubenswrapper[4665]: I1205 01:31:06.933630 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rw9zw\" (UniqueName: \"kubernetes.io/projected/f748e17a-771b-4153-ab91-1e105e392917-kube-api-access-rw9zw\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.069202 4665 generic.go:334] "Generic (PLEG): container finished" podID="42cffe78-bc9d-41bf-8d00-c84999388614" containerID="b014d93b6d1ffc0abbec7266cdb1929e882757c6c87d997025d3c287a9fdd928" exitCode=0 Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.069692 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-zr2m2-config-5jt2n" event={"ID":"42cffe78-bc9d-41bf-8d00-c84999388614","Type":"ContainerDied","Data":"b014d93b6d1ffc0abbec7266cdb1929e882757c6c87d997025d3c287a9fdd928"} Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.070526 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-98hr7" event={"ID":"4ef8c440-1a96-47cd-a75a-5d4df00edda5","Type":"ContainerDied","Data":"4e9c083f6bd693e794a2fb1ce85092118ee5e8f6fa5973386101c38cc336ec70"} Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.070546 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4e9c083f6bd693e794a2fb1ce85092118ee5e8f6fa5973386101c38cc336ec70" Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.070588 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-98hr7" Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.073974 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-e5ee-account-create-update-sq82x" event={"ID":"a0780418-7936-4123-9a41-b3a7e6b22b9d","Type":"ContainerDied","Data":"007f98156b80e0a08e724e1f0f337b7363ab5c3b6eb1c0f43fc43eb989c403af"} Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.074003 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="007f98156b80e0a08e724e1f0f337b7363ab5c3b6eb1c0f43fc43eb989c403af" Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.074062 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-e5ee-account-create-update-sq82x" Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.075133 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-cgc75" event={"ID":"9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42","Type":"ContainerDied","Data":"99c1fcffb850df2f21cae1449c179928c578d188e4cd608bd1c141ccd9bee4c0"} Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.075143 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-cgc75"
Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.075150 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="99c1fcffb850df2f21cae1449c179928c578d188e4cd608bd1c141ccd9bee4c0"
Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.076946 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cc5a-account-create-update-9lwsp" event={"ID":"f748e17a-771b-4153-ab91-1e105e392917","Type":"ContainerDied","Data":"bffc6eac03a5d5206344b77f3fb3d8ecf61b42969c3379d2feeea76f09a30900"}
Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.077010 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bffc6eac03a5d5206344b77f3fb3d8ecf61b42969c3379d2feeea76f09a30900"
Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.077015 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cc5a-account-create-update-9lwsp"
Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.433911 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-dd2gb"]
Dec 05 01:31:07 crc kubenswrapper[4665]: E1205 01:31:07.434245 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82bcac28-3f91-477d-a370-c510d062b2eb" containerName="mariadb-account-create-update"
Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.434264 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="82bcac28-3f91-477d-a370-c510d062b2eb" containerName="mariadb-account-create-update"
Dec 05 01:31:07 crc kubenswrapper[4665]: E1205 01:31:07.434278 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42" containerName="mariadb-database-create"
Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.434285 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42" containerName="mariadb-database-create"
Dec 05 01:31:07 crc kubenswrapper[4665]: E1205 01:31:07.434316 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0780418-7936-4123-9a41-b3a7e6b22b9d" containerName="mariadb-account-create-update"
Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.434323 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0780418-7936-4123-9a41-b3a7e6b22b9d" containerName="mariadb-account-create-update"
Dec 05 01:31:07 crc kubenswrapper[4665]: E1205 01:31:07.434339 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8114fb6-6378-4fca-969d-c611a6c5f330" containerName="init"
Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.434344 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8114fb6-6378-4fca-969d-c611a6c5f330" containerName="init"
Dec 05 01:31:07 crc kubenswrapper[4665]: E1205 01:31:07.434352 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ef8c440-1a96-47cd-a75a-5d4df00edda5" containerName="mariadb-database-create"
Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.434358 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ef8c440-1a96-47cd-a75a-5d4df00edda5" containerName="mariadb-database-create"
Dec 05 01:31:07 crc kubenswrapper[4665]: E1205 01:31:07.434369 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f748e17a-771b-4153-ab91-1e105e392917" containerName="mariadb-account-create-update"
Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.434376 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="f748e17a-771b-4153-ab91-1e105e392917" containerName="mariadb-account-create-update"
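The RemoveStaleState burst above runs when a new pod (glance-db-sync-dd2gb) is admitted: the CPU and memory managers drop per-container resource assignments whose pods no longer exist, here the finished db-create and account-create jobs. A sketch of that cleanup over a map-based state (illustrative only; the real managers operate on checkpointed state files):

package main

import "fmt"

// containerKey identifies an assignment the way the log entries do:
// by pod UID plus container name.
type containerKey struct{ podUID, container string }

// removeStaleState drops assignments for pods that are no longer
// active, mirroring cpu_manager/memory_manager RemoveStaleState.
func removeStaleState(assignments map[containerKey]string, active map[string]bool) {
	for k := range assignments { // deleting during range is safe in Go
		if !active[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container %s/%s\n", k.podUID, k.container)
			delete(assignments, k)
		}
	}
}

func main() {
	state := map[containerKey]string{
		{"f748e17a-771b-4153-ab91-1e105e392917", "mariadb-account-create-update"}: "cpuset 0-1",
	}
	removeStaleState(state, map[string]bool{}) // no matching active pods
}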
Dec 05 01:31:07 crc kubenswrapper[4665]: E1205 01:31:07.434389 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8114fb6-6378-4fca-969d-c611a6c5f330" containerName="dnsmasq-dns"
Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.434395 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8114fb6-6378-4fca-969d-c611a6c5f330" containerName="dnsmasq-dns"
Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.434533 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42" containerName="mariadb-database-create"
Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.434548 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="82bcac28-3f91-477d-a370-c510d062b2eb" containerName="mariadb-account-create-update"
Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.434560 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ef8c440-1a96-47cd-a75a-5d4df00edda5" containerName="mariadb-database-create"
Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.434568 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8114fb6-6378-4fca-969d-c611a6c5f330" containerName="dnsmasq-dns"
Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.434577 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="f748e17a-771b-4153-ab91-1e105e392917" containerName="mariadb-account-create-update"
Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.434587 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0780418-7936-4123-9a41-b3a7e6b22b9d" containerName="mariadb-account-create-update"
Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.435170 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-dd2gb" Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.437975 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.438098 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-wn6mh" Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.447648 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-dd2gb"] Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.543979 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/271549ee-1768-4a76-bbc9-d931689a6ad9-db-sync-config-data\") pod \"glance-db-sync-dd2gb\" (UID: \"271549ee-1768-4a76-bbc9-d931689a6ad9\") " pod="openstack/glance-db-sync-dd2gb" Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.544052 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sk9hf\" (UniqueName: \"kubernetes.io/projected/271549ee-1768-4a76-bbc9-d931689a6ad9-kube-api-access-sk9hf\") pod \"glance-db-sync-dd2gb\" (UID: \"271549ee-1768-4a76-bbc9-d931689a6ad9\") " pod="openstack/glance-db-sync-dd2gb" Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.544082 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/271549ee-1768-4a76-bbc9-d931689a6ad9-combined-ca-bundle\") pod \"glance-db-sync-dd2gb\" (UID: \"271549ee-1768-4a76-bbc9-d931689a6ad9\") " pod="openstack/glance-db-sync-dd2gb" Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.544674 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/271549ee-1768-4a76-bbc9-d931689a6ad9-config-data\") pod \"glance-db-sync-dd2gb\" (UID: \"271549ee-1768-4a76-bbc9-d931689a6ad9\") " pod="openstack/glance-db-sync-dd2gb" Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.646280 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/271549ee-1768-4a76-bbc9-d931689a6ad9-config-data\") pod \"glance-db-sync-dd2gb\" (UID: \"271549ee-1768-4a76-bbc9-d931689a6ad9\") " pod="openstack/glance-db-sync-dd2gb" Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.646354 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/271549ee-1768-4a76-bbc9-d931689a6ad9-db-sync-config-data\") pod \"glance-db-sync-dd2gb\" (UID: \"271549ee-1768-4a76-bbc9-d931689a6ad9\") " pod="openstack/glance-db-sync-dd2gb" Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.646397 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sk9hf\" (UniqueName: \"kubernetes.io/projected/271549ee-1768-4a76-bbc9-d931689a6ad9-kube-api-access-sk9hf\") pod \"glance-db-sync-dd2gb\" (UID: \"271549ee-1768-4a76-bbc9-d931689a6ad9\") " pod="openstack/glance-db-sync-dd2gb" Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.646416 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/271549ee-1768-4a76-bbc9-d931689a6ad9-combined-ca-bundle\") pod 
\"glance-db-sync-dd2gb\" (UID: \"271549ee-1768-4a76-bbc9-d931689a6ad9\") " pod="openstack/glance-db-sync-dd2gb" Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.651011 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/271549ee-1768-4a76-bbc9-d931689a6ad9-config-data\") pod \"glance-db-sync-dd2gb\" (UID: \"271549ee-1768-4a76-bbc9-d931689a6ad9\") " pod="openstack/glance-db-sync-dd2gb" Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.651060 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/271549ee-1768-4a76-bbc9-d931689a6ad9-combined-ca-bundle\") pod \"glance-db-sync-dd2gb\" (UID: \"271549ee-1768-4a76-bbc9-d931689a6ad9\") " pod="openstack/glance-db-sync-dd2gb" Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.651383 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/271549ee-1768-4a76-bbc9-d931689a6ad9-db-sync-config-data\") pod \"glance-db-sync-dd2gb\" (UID: \"271549ee-1768-4a76-bbc9-d931689a6ad9\") " pod="openstack/glance-db-sync-dd2gb" Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.663228 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sk9hf\" (UniqueName: \"kubernetes.io/projected/271549ee-1768-4a76-bbc9-d931689a6ad9-kube-api-access-sk9hf\") pod \"glance-db-sync-dd2gb\" (UID: \"271549ee-1768-4a76-bbc9-d931689a6ad9\") " pod="openstack/glance-db-sync-dd2gb" Dec 05 01:31:07 crc kubenswrapper[4665]: I1205 01:31:07.753080 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-dd2gb" Dec 05 01:31:08 crc kubenswrapper[4665]: I1205 01:31:08.292391 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-dd2gb"] Dec 05 01:31:08 crc kubenswrapper[4665]: I1205 01:31:08.420180 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-zr2m2-config-5jt2n" Dec 05 01:31:08 crc kubenswrapper[4665]: I1205 01:31:08.467366 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/42cffe78-bc9d-41bf-8d00-c84999388614-var-run\") pod \"42cffe78-bc9d-41bf-8d00-c84999388614\" (UID: \"42cffe78-bc9d-41bf-8d00-c84999388614\") " Dec 05 01:31:08 crc kubenswrapper[4665]: I1205 01:31:08.467461 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/42cffe78-bc9d-41bf-8d00-c84999388614-scripts\") pod \"42cffe78-bc9d-41bf-8d00-c84999388614\" (UID: \"42cffe78-bc9d-41bf-8d00-c84999388614\") " Dec 05 01:31:08 crc kubenswrapper[4665]: I1205 01:31:08.467505 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/42cffe78-bc9d-41bf-8d00-c84999388614-additional-scripts\") pod \"42cffe78-bc9d-41bf-8d00-c84999388614\" (UID: \"42cffe78-bc9d-41bf-8d00-c84999388614\") " Dec 05 01:31:08 crc kubenswrapper[4665]: I1205 01:31:08.467561 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/42cffe78-bc9d-41bf-8d00-c84999388614-var-run" (OuterVolumeSpecName: "var-run") pod "42cffe78-bc9d-41bf-8d00-c84999388614" (UID: "42cffe78-bc9d-41bf-8d00-c84999388614"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:31:08 crc kubenswrapper[4665]: I1205 01:31:08.467613 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/42cffe78-bc9d-41bf-8d00-c84999388614-var-run-ovn\") pod \"42cffe78-bc9d-41bf-8d00-c84999388614\" (UID: \"42cffe78-bc9d-41bf-8d00-c84999388614\") " Dec 05 01:31:08 crc kubenswrapper[4665]: I1205 01:31:08.467697 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s2wnv\" (UniqueName: \"kubernetes.io/projected/42cffe78-bc9d-41bf-8d00-c84999388614-kube-api-access-s2wnv\") pod \"42cffe78-bc9d-41bf-8d00-c84999388614\" (UID: \"42cffe78-bc9d-41bf-8d00-c84999388614\") " Dec 05 01:31:08 crc kubenswrapper[4665]: I1205 01:31:08.467734 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/42cffe78-bc9d-41bf-8d00-c84999388614-var-log-ovn\") pod \"42cffe78-bc9d-41bf-8d00-c84999388614\" (UID: \"42cffe78-bc9d-41bf-8d00-c84999388614\") " Dec 05 01:31:08 crc kubenswrapper[4665]: I1205 01:31:08.468115 4665 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/42cffe78-bc9d-41bf-8d00-c84999388614-var-run\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:08 crc kubenswrapper[4665]: I1205 01:31:08.468160 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/42cffe78-bc9d-41bf-8d00-c84999388614-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "42cffe78-bc9d-41bf-8d00-c84999388614" (UID: "42cffe78-bc9d-41bf-8d00-c84999388614"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:31:08 crc kubenswrapper[4665]: I1205 01:31:08.468171 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42cffe78-bc9d-41bf-8d00-c84999388614-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "42cffe78-bc9d-41bf-8d00-c84999388614" (UID: "42cffe78-bc9d-41bf-8d00-c84999388614"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:31:08 crc kubenswrapper[4665]: I1205 01:31:08.468188 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/42cffe78-bc9d-41bf-8d00-c84999388614-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "42cffe78-bc9d-41bf-8d00-c84999388614" (UID: "42cffe78-bc9d-41bf-8d00-c84999388614"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:31:08 crc kubenswrapper[4665]: I1205 01:31:08.468449 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42cffe78-bc9d-41bf-8d00-c84999388614-scripts" (OuterVolumeSpecName: "scripts") pod "42cffe78-bc9d-41bf-8d00-c84999388614" (UID: "42cffe78-bc9d-41bf-8d00-c84999388614"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:31:08 crc kubenswrapper[4665]: I1205 01:31:08.475732 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42cffe78-bc9d-41bf-8d00-c84999388614-kube-api-access-s2wnv" (OuterVolumeSpecName: "kube-api-access-s2wnv") pod "42cffe78-bc9d-41bf-8d00-c84999388614" (UID: "42cffe78-bc9d-41bf-8d00-c84999388614"). InnerVolumeSpecName "kube-api-access-s2wnv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:31:08 crc kubenswrapper[4665]: I1205 01:31:08.570049 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s2wnv\" (UniqueName: \"kubernetes.io/projected/42cffe78-bc9d-41bf-8d00-c84999388614-kube-api-access-s2wnv\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:08 crc kubenswrapper[4665]: I1205 01:31:08.570084 4665 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/42cffe78-bc9d-41bf-8d00-c84999388614-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:08 crc kubenswrapper[4665]: I1205 01:31:08.570093 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/42cffe78-bc9d-41bf-8d00-c84999388614-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:08 crc kubenswrapper[4665]: I1205 01:31:08.570103 4665 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/42cffe78-bc9d-41bf-8d00-c84999388614-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:08 crc kubenswrapper[4665]: I1205 01:31:08.570111 4665 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/42cffe78-bc9d-41bf-8d00-c84999388614-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.041023 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-zr2m2" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.107226 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-zr2m2-config-5jt2n" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.107334 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-zr2m2-config-5jt2n" event={"ID":"42cffe78-bc9d-41bf-8d00-c84999388614","Type":"ContainerDied","Data":"5cfd0f19363a033011ee0af06b0b2d2598450d17fb3061a7261801926e54e0d0"} Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.107434 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5cfd0f19363a033011ee0af06b0b2d2598450d17fb3061a7261801926e54e0d0" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.108930 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-dd2gb" event={"ID":"271549ee-1768-4a76-bbc9-d931689a6ad9","Type":"ContainerStarted","Data":"883885eef4f01fe2706eddfb7bf0a155f4910fc13d04ad51a8636a6c51773694"} Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.540762 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-zr2m2-config-5jt2n"] Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.550273 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-zr2m2-config-5jt2n"] Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.656505 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-zr2m2-config-c42zs"] Dec 05 01:31:09 crc kubenswrapper[4665]: E1205 01:31:09.656898 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42cffe78-bc9d-41bf-8d00-c84999388614" containerName="ovn-config" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.656919 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="42cffe78-bc9d-41bf-8d00-c84999388614" containerName="ovn-config" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 
01:31:09.657102 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="42cffe78-bc9d-41bf-8d00-c84999388614" containerName="ovn-config" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.657633 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-zr2m2-config-c42zs" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.659765 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.672895 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-zr2m2-config-c42zs"] Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.791306 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/feae0823-936d-48a1-9bd5-38ee2d98dd16-var-log-ovn\") pod \"ovn-controller-zr2m2-config-c42zs\" (UID: \"feae0823-936d-48a1-9bd5-38ee2d98dd16\") " pod="openstack/ovn-controller-zr2m2-config-c42zs" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.791607 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/feae0823-936d-48a1-9bd5-38ee2d98dd16-var-run\") pod \"ovn-controller-zr2m2-config-c42zs\" (UID: \"feae0823-936d-48a1-9bd5-38ee2d98dd16\") " pod="openstack/ovn-controller-zr2m2-config-c42zs" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.791668 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/feae0823-936d-48a1-9bd5-38ee2d98dd16-var-run-ovn\") pod \"ovn-controller-zr2m2-config-c42zs\" (UID: \"feae0823-936d-48a1-9bd5-38ee2d98dd16\") " pod="openstack/ovn-controller-zr2m2-config-c42zs" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.791816 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/feae0823-936d-48a1-9bd5-38ee2d98dd16-scripts\") pod \"ovn-controller-zr2m2-config-c42zs\" (UID: \"feae0823-936d-48a1-9bd5-38ee2d98dd16\") " pod="openstack/ovn-controller-zr2m2-config-c42zs" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.791857 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4sk7\" (UniqueName: \"kubernetes.io/projected/feae0823-936d-48a1-9bd5-38ee2d98dd16-kube-api-access-f4sk7\") pod \"ovn-controller-zr2m2-config-c42zs\" (UID: \"feae0823-936d-48a1-9bd5-38ee2d98dd16\") " pod="openstack/ovn-controller-zr2m2-config-c42zs" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.791893 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/feae0823-936d-48a1-9bd5-38ee2d98dd16-additional-scripts\") pod \"ovn-controller-zr2m2-config-c42zs\" (UID: \"feae0823-936d-48a1-9bd5-38ee2d98dd16\") " pod="openstack/ovn-controller-zr2m2-config-c42zs" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.893569 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/feae0823-936d-48a1-9bd5-38ee2d98dd16-var-run\") pod \"ovn-controller-zr2m2-config-c42zs\" (UID: \"feae0823-936d-48a1-9bd5-38ee2d98dd16\") " 
pod="openstack/ovn-controller-zr2m2-config-c42zs" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.893610 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/feae0823-936d-48a1-9bd5-38ee2d98dd16-var-run-ovn\") pod \"ovn-controller-zr2m2-config-c42zs\" (UID: \"feae0823-936d-48a1-9bd5-38ee2d98dd16\") " pod="openstack/ovn-controller-zr2m2-config-c42zs" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.893669 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/feae0823-936d-48a1-9bd5-38ee2d98dd16-scripts\") pod \"ovn-controller-zr2m2-config-c42zs\" (UID: \"feae0823-936d-48a1-9bd5-38ee2d98dd16\") " pod="openstack/ovn-controller-zr2m2-config-c42zs" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.893694 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4sk7\" (UniqueName: \"kubernetes.io/projected/feae0823-936d-48a1-9bd5-38ee2d98dd16-kube-api-access-f4sk7\") pod \"ovn-controller-zr2m2-config-c42zs\" (UID: \"feae0823-936d-48a1-9bd5-38ee2d98dd16\") " pod="openstack/ovn-controller-zr2m2-config-c42zs" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.893720 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/feae0823-936d-48a1-9bd5-38ee2d98dd16-additional-scripts\") pod \"ovn-controller-zr2m2-config-c42zs\" (UID: \"feae0823-936d-48a1-9bd5-38ee2d98dd16\") " pod="openstack/ovn-controller-zr2m2-config-c42zs" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.893761 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/feae0823-936d-48a1-9bd5-38ee2d98dd16-var-log-ovn\") pod \"ovn-controller-zr2m2-config-c42zs\" (UID: \"feae0823-936d-48a1-9bd5-38ee2d98dd16\") " pod="openstack/ovn-controller-zr2m2-config-c42zs" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.893895 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/feae0823-936d-48a1-9bd5-38ee2d98dd16-var-run\") pod \"ovn-controller-zr2m2-config-c42zs\" (UID: \"feae0823-936d-48a1-9bd5-38ee2d98dd16\") " pod="openstack/ovn-controller-zr2m2-config-c42zs" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.893915 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/feae0823-936d-48a1-9bd5-38ee2d98dd16-var-run-ovn\") pod \"ovn-controller-zr2m2-config-c42zs\" (UID: \"feae0823-936d-48a1-9bd5-38ee2d98dd16\") " pod="openstack/ovn-controller-zr2m2-config-c42zs" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.893908 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/feae0823-936d-48a1-9bd5-38ee2d98dd16-var-log-ovn\") pod \"ovn-controller-zr2m2-config-c42zs\" (UID: \"feae0823-936d-48a1-9bd5-38ee2d98dd16\") " pod="openstack/ovn-controller-zr2m2-config-c42zs" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.894828 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/feae0823-936d-48a1-9bd5-38ee2d98dd16-additional-scripts\") pod \"ovn-controller-zr2m2-config-c42zs\" (UID: \"feae0823-936d-48a1-9bd5-38ee2d98dd16\") " 
pod="openstack/ovn-controller-zr2m2-config-c42zs" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.895732 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/feae0823-936d-48a1-9bd5-38ee2d98dd16-scripts\") pod \"ovn-controller-zr2m2-config-c42zs\" (UID: \"feae0823-936d-48a1-9bd5-38ee2d98dd16\") " pod="openstack/ovn-controller-zr2m2-config-c42zs" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.913015 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4sk7\" (UniqueName: \"kubernetes.io/projected/feae0823-936d-48a1-9bd5-38ee2d98dd16-kube-api-access-f4sk7\") pod \"ovn-controller-zr2m2-config-c42zs\" (UID: \"feae0823-936d-48a1-9bd5-38ee2d98dd16\") " pod="openstack/ovn-controller-zr2m2-config-c42zs" Dec 05 01:31:09 crc kubenswrapper[4665]: I1205 01:31:09.977985 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-zr2m2-config-c42zs" Dec 05 01:31:10 crc kubenswrapper[4665]: I1205 01:31:10.273968 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-zr2m2-config-c42zs"] Dec 05 01:31:10 crc kubenswrapper[4665]: I1205 01:31:10.904880 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42cffe78-bc9d-41bf-8d00-c84999388614" path="/var/lib/kubelet/pods/42cffe78-bc9d-41bf-8d00-c84999388614/volumes" Dec 05 01:31:11 crc kubenswrapper[4665]: I1205 01:31:11.131566 4665 generic.go:334] "Generic (PLEG): container finished" podID="feae0823-936d-48a1-9bd5-38ee2d98dd16" containerID="f3984be52016e332c575d67ff002b615eb5fd0d73d4bd66fb4f0c371e26877e7" exitCode=0 Dec 05 01:31:11 crc kubenswrapper[4665]: I1205 01:31:11.131617 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-zr2m2-config-c42zs" event={"ID":"feae0823-936d-48a1-9bd5-38ee2d98dd16","Type":"ContainerDied","Data":"f3984be52016e332c575d67ff002b615eb5fd0d73d4bd66fb4f0c371e26877e7"} Dec 05 01:31:11 crc kubenswrapper[4665]: I1205 01:31:11.131667 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-zr2m2-config-c42zs" event={"ID":"feae0823-936d-48a1-9bd5-38ee2d98dd16","Type":"ContainerStarted","Data":"95d8243ee733f86a139f020731530b8a50780dc862d1eb3c9e98b22237caf21e"} Dec 05 01:31:11 crc kubenswrapper[4665]: I1205 01:31:11.520576 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-etc-swift\") pod \"swift-storage-0\" (UID: \"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f\") " pod="openstack/swift-storage-0" Dec 05 01:31:11 crc kubenswrapper[4665]: E1205 01:31:11.520916 4665 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 01:31:11 crc kubenswrapper[4665]: E1205 01:31:11.520936 4665 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 01:31:11 crc kubenswrapper[4665]: E1205 01:31:11.521004 4665 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-etc-swift podName:d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f nodeName:}" failed. No retries permitted until 2025-12-05 01:31:27.520988687 +0000 UTC m=+1262.860380986 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-etc-swift") pod "swift-storage-0" (UID: "d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f") : configmap "swift-ring-files" not found Dec 05 01:31:12 crc kubenswrapper[4665]: I1205 01:31:12.152332 4665 generic.go:334] "Generic (PLEG): container finished" podID="82ad13d8-7710-4135-9822-a96d62650e6d" containerID="21680eb362cd8e9adf0a46190a2a0086a2499a8f49cd82521b6d1f78e0cba993" exitCode=0 Dec 05 01:31:12 crc kubenswrapper[4665]: I1205 01:31:12.152421 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"82ad13d8-7710-4135-9822-a96d62650e6d","Type":"ContainerDied","Data":"21680eb362cd8e9adf0a46190a2a0086a2499a8f49cd82521b6d1f78e0cba993"} Dec 05 01:31:12 crc kubenswrapper[4665]: I1205 01:31:12.493885 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-zr2m2-config-c42zs" Dec 05 01:31:12 crc kubenswrapper[4665]: I1205 01:31:12.546270 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/feae0823-936d-48a1-9bd5-38ee2d98dd16-additional-scripts\") pod \"feae0823-936d-48a1-9bd5-38ee2d98dd16\" (UID: \"feae0823-936d-48a1-9bd5-38ee2d98dd16\") " Dec 05 01:31:12 crc kubenswrapper[4665]: I1205 01:31:12.546733 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/feae0823-936d-48a1-9bd5-38ee2d98dd16-var-run\") pod \"feae0823-936d-48a1-9bd5-38ee2d98dd16\" (UID: \"feae0823-936d-48a1-9bd5-38ee2d98dd16\") " Dec 05 01:31:12 crc kubenswrapper[4665]: I1205 01:31:12.546831 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/feae0823-936d-48a1-9bd5-38ee2d98dd16-var-run" (OuterVolumeSpecName: "var-run") pod "feae0823-936d-48a1-9bd5-38ee2d98dd16" (UID: "feae0823-936d-48a1-9bd5-38ee2d98dd16"). InnerVolumeSpecName "var-run". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:31:12 crc kubenswrapper[4665]: I1205 01:31:12.547010 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/feae0823-936d-48a1-9bd5-38ee2d98dd16-var-run-ovn\") pod \"feae0823-936d-48a1-9bd5-38ee2d98dd16\" (UID: \"feae0823-936d-48a1-9bd5-38ee2d98dd16\") " Dec 05 01:31:12 crc kubenswrapper[4665]: I1205 01:31:12.547121 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f4sk7\" (UniqueName: \"kubernetes.io/projected/feae0823-936d-48a1-9bd5-38ee2d98dd16-kube-api-access-f4sk7\") pod \"feae0823-936d-48a1-9bd5-38ee2d98dd16\" (UID: \"feae0823-936d-48a1-9bd5-38ee2d98dd16\") " Dec 05 01:31:12 crc kubenswrapper[4665]: I1205 01:31:12.547203 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/feae0823-936d-48a1-9bd5-38ee2d98dd16-var-log-ovn\") pod \"feae0823-936d-48a1-9bd5-38ee2d98dd16\" (UID: \"feae0823-936d-48a1-9bd5-38ee2d98dd16\") " Dec 05 01:31:12 crc kubenswrapper[4665]: I1205 01:31:12.547304 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/feae0823-936d-48a1-9bd5-38ee2d98dd16-scripts\") pod \"feae0823-936d-48a1-9bd5-38ee2d98dd16\" (UID: \"feae0823-936d-48a1-9bd5-38ee2d98dd16\") " Dec 05 01:31:12 crc kubenswrapper[4665]: I1205 01:31:12.547049 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/feae0823-936d-48a1-9bd5-38ee2d98dd16-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "feae0823-936d-48a1-9bd5-38ee2d98dd16" (UID: "feae0823-936d-48a1-9bd5-38ee2d98dd16"). InnerVolumeSpecName "additional-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:31:12 crc kubenswrapper[4665]: I1205 01:31:12.547069 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/feae0823-936d-48a1-9bd5-38ee2d98dd16-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "feae0823-936d-48a1-9bd5-38ee2d98dd16" (UID: "feae0823-936d-48a1-9bd5-38ee2d98dd16"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:31:12 crc kubenswrapper[4665]: I1205 01:31:12.547596 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/feae0823-936d-48a1-9bd5-38ee2d98dd16-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "feae0823-936d-48a1-9bd5-38ee2d98dd16" (UID: "feae0823-936d-48a1-9bd5-38ee2d98dd16"). InnerVolumeSpecName "var-log-ovn". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:31:12 crc kubenswrapper[4665]: I1205 01:31:12.548041 4665 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/feae0823-936d-48a1-9bd5-38ee2d98dd16-var-run\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:12 crc kubenswrapper[4665]: I1205 01:31:12.548127 4665 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/feae0823-936d-48a1-9bd5-38ee2d98dd16-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:12 crc kubenswrapper[4665]: I1205 01:31:12.548193 4665 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/feae0823-936d-48a1-9bd5-38ee2d98dd16-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:12 crc kubenswrapper[4665]: I1205 01:31:12.548263 4665 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/feae0823-936d-48a1-9bd5-38ee2d98dd16-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:12 crc kubenswrapper[4665]: I1205 01:31:12.548688 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/feae0823-936d-48a1-9bd5-38ee2d98dd16-scripts" (OuterVolumeSpecName: "scripts") pod "feae0823-936d-48a1-9bd5-38ee2d98dd16" (UID: "feae0823-936d-48a1-9bd5-38ee2d98dd16"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:31:12 crc kubenswrapper[4665]: I1205 01:31:12.551142 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/feae0823-936d-48a1-9bd5-38ee2d98dd16-kube-api-access-f4sk7" (OuterVolumeSpecName: "kube-api-access-f4sk7") pod "feae0823-936d-48a1-9bd5-38ee2d98dd16" (UID: "feae0823-936d-48a1-9bd5-38ee2d98dd16"). InnerVolumeSpecName "kube-api-access-f4sk7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:31:12 crc kubenswrapper[4665]: I1205 01:31:12.649896 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f4sk7\" (UniqueName: \"kubernetes.io/projected/feae0823-936d-48a1-9bd5-38ee2d98dd16-kube-api-access-f4sk7\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:12 crc kubenswrapper[4665]: I1205 01:31:12.649928 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/feae0823-936d-48a1-9bd5-38ee2d98dd16-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:13 crc kubenswrapper[4665]: I1205 01:31:13.166255 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"82ad13d8-7710-4135-9822-a96d62650e6d","Type":"ContainerStarted","Data":"68eb92747eb5b9149271000ef0f9b6387b4fd5c2938c46feaf67f6a0ceae9691"} Dec 05 01:31:13 crc kubenswrapper[4665]: I1205 01:31:13.167465 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:31:13 crc kubenswrapper[4665]: I1205 01:31:13.174635 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-zr2m2-config-c42zs" event={"ID":"feae0823-936d-48a1-9bd5-38ee2d98dd16","Type":"ContainerDied","Data":"95d8243ee733f86a139f020731530b8a50780dc862d1eb3c9e98b22237caf21e"} Dec 05 01:31:13 crc kubenswrapper[4665]: I1205 01:31:13.174680 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95d8243ee733f86a139f020731530b8a50780dc862d1eb3c9e98b22237caf21e" Dec 05 01:31:13 crc kubenswrapper[4665]: I1205 01:31:13.174744 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-zr2m2-config-c42zs" Dec 05 01:31:13 crc kubenswrapper[4665]: I1205 01:31:13.209890 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=-9223371950.644901 podStartE2EDuration="1m26.209874563s" podCreationTimestamp="2025-12-05 01:29:47 +0000 UTC" firstStartedPulling="2025-12-05 01:29:49.662717636 +0000 UTC m=+1165.002109935" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:31:13.205068668 +0000 UTC m=+1248.544460967" watchObservedRunningTime="2025-12-05 01:31:13.209874563 +0000 UTC m=+1248.549266862" Dec 05 01:31:13 crc kubenswrapper[4665]: I1205 01:31:13.576988 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-zr2m2-config-c42zs"] Dec 05 01:31:13 crc kubenswrapper[4665]: I1205 01:31:13.586203 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-zr2m2-config-c42zs"] Dec 05 01:31:14 crc kubenswrapper[4665]: I1205 01:31:14.194085 4665 generic.go:334] "Generic (PLEG): container finished" podID="796fddd5-4127-4632-8728-406e29348c74" containerID="a2a1df8da0282bb2bc6e31356fc714a9ed214cc28e0b22a272f9a55fac5db082" exitCode=0 Dec 05 01:31:14 crc kubenswrapper[4665]: I1205 01:31:14.194423 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-zlrjx" event={"ID":"796fddd5-4127-4632-8728-406e29348c74","Type":"ContainerDied","Data":"a2a1df8da0282bb2bc6e31356fc714a9ed214cc28e0b22a272f9a55fac5db082"} Dec 05 01:31:14 crc kubenswrapper[4665]: I1205 01:31:14.904658 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="feae0823-936d-48a1-9bd5-38ee2d98dd16" 
path="/var/lib/kubelet/pods/feae0823-936d-48a1-9bd5-38ee2d98dd16/volumes" Dec 05 01:31:14 crc kubenswrapper[4665]: I1205 01:31:14.930543 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:31:14 crc kubenswrapper[4665]: I1205 01:31:14.930594 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:31:14 crc kubenswrapper[4665]: I1205 01:31:14.930639 4665 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 01:31:14 crc kubenswrapper[4665]: I1205 01:31:14.932011 4665 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ddea2cf7dfc7d76e1a9cf4f232382b2b597e0edaf17f47e1250c2d22c5805549"} pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 01:31:14 crc kubenswrapper[4665]: I1205 01:31:14.932106 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" containerID="cri-o://ddea2cf7dfc7d76e1a9cf4f232382b2b597e0edaf17f47e1250c2d22c5805549" gracePeriod=600 Dec 05 01:31:16 crc kubenswrapper[4665]: I1205 01:31:16.212926 4665 generic.go:334] "Generic (PLEG): container finished" podID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerID="ddea2cf7dfc7d76e1a9cf4f232382b2b597e0edaf17f47e1250c2d22c5805549" exitCode=0 Dec 05 01:31:16 crc kubenswrapper[4665]: I1205 01:31:16.213129 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerDied","Data":"ddea2cf7dfc7d76e1a9cf4f232382b2b597e0edaf17f47e1250c2d22c5805549"} Dec 05 01:31:16 crc kubenswrapper[4665]: I1205 01:31:16.213272 4665 scope.go:117] "RemoveContainer" containerID="1cd3d1505d7ab823a5b8b16aa7787ab1595f7aa23355a3b5bb9a7a6dd4cb7347" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.013452 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.435413 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-2l7ml"] Dec 05 01:31:19 crc kubenswrapper[4665]: E1205 01:31:19.435820 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="feae0823-936d-48a1-9bd5-38ee2d98dd16" containerName="ovn-config" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.435841 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="feae0823-936d-48a1-9bd5-38ee2d98dd16" containerName="ovn-config" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.436034 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="feae0823-936d-48a1-9bd5-38ee2d98dd16" containerName="ovn-config" Dec 05 01:31:19 
crc kubenswrapper[4665]: I1205 01:31:19.436743 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-2l7ml" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.462074 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-2l7ml"] Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.554573 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-9x5x9"] Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.555565 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-9x5x9" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.571770 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khtt5\" (UniqueName: \"kubernetes.io/projected/c4310ab2-2a7e-4c02-a58c-f50d4d85882d-kube-api-access-khtt5\") pod \"barbican-db-create-2l7ml\" (UID: \"c4310ab2-2a7e-4c02-a58c-f50d4d85882d\") " pod="openstack/barbican-db-create-2l7ml" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.571814 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4310ab2-2a7e-4c02-a58c-f50d4d85882d-operator-scripts\") pod \"barbican-db-create-2l7ml\" (UID: \"c4310ab2-2a7e-4c02-a58c-f50d4d85882d\") " pod="openstack/barbican-db-create-2l7ml" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.576557 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-9x5x9"] Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.636629 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-1f10-account-create-update-9c7nb"] Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.637635 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-1f10-account-create-update-9c7nb" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.644674 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.656770 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-1f10-account-create-update-9c7nb"] Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.673201 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/003173d7-ba20-4971-b03e-ba2fb5039ff7-operator-scripts\") pod \"cinder-db-create-9x5x9\" (UID: \"003173d7-ba20-4971-b03e-ba2fb5039ff7\") " pod="openstack/cinder-db-create-9x5x9" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.673237 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khtt5\" (UniqueName: \"kubernetes.io/projected/c4310ab2-2a7e-4c02-a58c-f50d4d85882d-kube-api-access-khtt5\") pod \"barbican-db-create-2l7ml\" (UID: \"c4310ab2-2a7e-4c02-a58c-f50d4d85882d\") " pod="openstack/barbican-db-create-2l7ml" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.673269 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4310ab2-2a7e-4c02-a58c-f50d4d85882d-operator-scripts\") pod \"barbican-db-create-2l7ml\" (UID: \"c4310ab2-2a7e-4c02-a58c-f50d4d85882d\") " pod="openstack/barbican-db-create-2l7ml" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.673361 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8s2hj\" (UniqueName: \"kubernetes.io/projected/003173d7-ba20-4971-b03e-ba2fb5039ff7-kube-api-access-8s2hj\") pod \"cinder-db-create-9x5x9\" (UID: \"003173d7-ba20-4971-b03e-ba2fb5039ff7\") " pod="openstack/cinder-db-create-9x5x9" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.674189 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4310ab2-2a7e-4c02-a58c-f50d4d85882d-operator-scripts\") pod \"barbican-db-create-2l7ml\" (UID: \"c4310ab2-2a7e-4c02-a58c-f50d4d85882d\") " pod="openstack/barbican-db-create-2l7ml" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.704179 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-v2dln"] Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.705506 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-sync-v2dln" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.708714 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-6ssd4" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.708885 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.708905 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.709661 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.720958 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khtt5\" (UniqueName: \"kubernetes.io/projected/c4310ab2-2a7e-4c02-a58c-f50d4d85882d-kube-api-access-khtt5\") pod \"barbican-db-create-2l7ml\" (UID: \"c4310ab2-2a7e-4c02-a58c-f50d4d85882d\") " pod="openstack/barbican-db-create-2l7ml" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.738240 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-v2dln"] Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.764955 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-3a7f-account-create-update-2b647"] Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.766079 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-3a7f-account-create-update-2b647" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.768470 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-2l7ml" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.769964 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.778403 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ff695797-ac78-42f0-9a60-96aa898b80f5-operator-scripts\") pod \"barbican-1f10-account-create-update-9c7nb\" (UID: \"ff695797-ac78-42f0-9a60-96aa898b80f5\") " pod="openstack/barbican-1f10-account-create-update-9c7nb" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.778489 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/003173d7-ba20-4971-b03e-ba2fb5039ff7-operator-scripts\") pod \"cinder-db-create-9x5x9\" (UID: \"003173d7-ba20-4971-b03e-ba2fb5039ff7\") " pod="openstack/cinder-db-create-9x5x9" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.778620 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8s2hj\" (UniqueName: \"kubernetes.io/projected/003173d7-ba20-4971-b03e-ba2fb5039ff7-kube-api-access-8s2hj\") pod \"cinder-db-create-9x5x9\" (UID: \"003173d7-ba20-4971-b03e-ba2fb5039ff7\") " pod="openstack/cinder-db-create-9x5x9" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.778749 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vhvv7\" (UniqueName: \"kubernetes.io/projected/ff695797-ac78-42f0-9a60-96aa898b80f5-kube-api-access-vhvv7\") pod \"barbican-1f10-account-create-update-9c7nb\" (UID: 
\"ff695797-ac78-42f0-9a60-96aa898b80f5\") " pod="openstack/barbican-1f10-account-create-update-9c7nb" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.779989 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/003173d7-ba20-4971-b03e-ba2fb5039ff7-operator-scripts\") pod \"cinder-db-create-9x5x9\" (UID: \"003173d7-ba20-4971-b03e-ba2fb5039ff7\") " pod="openstack/cinder-db-create-9x5x9" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.802448 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8s2hj\" (UniqueName: \"kubernetes.io/projected/003173d7-ba20-4971-b03e-ba2fb5039ff7-kube-api-access-8s2hj\") pod \"cinder-db-create-9x5x9\" (UID: \"003173d7-ba20-4971-b03e-ba2fb5039ff7\") " pod="openstack/cinder-db-create-9x5x9" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.819420 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-3a7f-account-create-update-2b647"] Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.848991 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-slrj8"] Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.850099 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-slrj8" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.857700 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-slrj8"] Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.872683 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-9x5x9" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.880188 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vhvv7\" (UniqueName: \"kubernetes.io/projected/ff695797-ac78-42f0-9a60-96aa898b80f5-kube-api-access-vhvv7\") pod \"barbican-1f10-account-create-update-9c7nb\" (UID: \"ff695797-ac78-42f0-9a60-96aa898b80f5\") " pod="openstack/barbican-1f10-account-create-update-9c7nb" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.880234 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ff695797-ac78-42f0-9a60-96aa898b80f5-operator-scripts\") pod \"barbican-1f10-account-create-update-9c7nb\" (UID: \"ff695797-ac78-42f0-9a60-96aa898b80f5\") " pod="openstack/barbican-1f10-account-create-update-9c7nb" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.880259 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27d97449-bfcc-4c56-8b48-b2f604661b77-operator-scripts\") pod \"cinder-3a7f-account-create-update-2b647\" (UID: \"27d97449-bfcc-4c56-8b48-b2f604661b77\") " pod="openstack/cinder-3a7f-account-create-update-2b647" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.880280 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8p2r5\" (UniqueName: \"kubernetes.io/projected/25488b08-5c2b-47d5-a7c1-1c1609bf8dab-kube-api-access-8p2r5\") pod \"keystone-db-sync-v2dln\" (UID: \"25488b08-5c2b-47d5-a7c1-1c1609bf8dab\") " pod="openstack/keystone-db-sync-v2dln" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.880311 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-f29cl\" (UniqueName: \"kubernetes.io/projected/27d97449-bfcc-4c56-8b48-b2f604661b77-kube-api-access-f29cl\") pod \"cinder-3a7f-account-create-update-2b647\" (UID: \"27d97449-bfcc-4c56-8b48-b2f604661b77\") " pod="openstack/cinder-3a7f-account-create-update-2b647" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.880330 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25488b08-5c2b-47d5-a7c1-1c1609bf8dab-combined-ca-bundle\") pod \"keystone-db-sync-v2dln\" (UID: \"25488b08-5c2b-47d5-a7c1-1c1609bf8dab\") " pod="openstack/keystone-db-sync-v2dln" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.880361 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25488b08-5c2b-47d5-a7c1-1c1609bf8dab-config-data\") pod \"keystone-db-sync-v2dln\" (UID: \"25488b08-5c2b-47d5-a7c1-1c1609bf8dab\") " pod="openstack/keystone-db-sync-v2dln" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.881325 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ff695797-ac78-42f0-9a60-96aa898b80f5-operator-scripts\") pod \"barbican-1f10-account-create-update-9c7nb\" (UID: \"ff695797-ac78-42f0-9a60-96aa898b80f5\") " pod="openstack/barbican-1f10-account-create-update-9c7nb" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.904502 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vhvv7\" (UniqueName: \"kubernetes.io/projected/ff695797-ac78-42f0-9a60-96aa898b80f5-kube-api-access-vhvv7\") pod \"barbican-1f10-account-create-update-9c7nb\" (UID: \"ff695797-ac78-42f0-9a60-96aa898b80f5\") " pod="openstack/barbican-1f10-account-create-update-9c7nb" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.936215 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7056-account-create-update-g7mgf"] Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.937512 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7056-account-create-update-g7mgf" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.950375 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.953425 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7056-account-create-update-g7mgf"] Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.962157 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-1f10-account-create-update-9c7nb" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.981485 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkzbj\" (UniqueName: \"kubernetes.io/projected/52dcbd14-37d0-437d-882d-c05591461848-kube-api-access-kkzbj\") pod \"neutron-db-create-slrj8\" (UID: \"52dcbd14-37d0-437d-882d-c05591461848\") " pod="openstack/neutron-db-create-slrj8" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.981615 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27d97449-bfcc-4c56-8b48-b2f604661b77-operator-scripts\") pod \"cinder-3a7f-account-create-update-2b647\" (UID: \"27d97449-bfcc-4c56-8b48-b2f604661b77\") " pod="openstack/cinder-3a7f-account-create-update-2b647" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.981642 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8p2r5\" (UniqueName: \"kubernetes.io/projected/25488b08-5c2b-47d5-a7c1-1c1609bf8dab-kube-api-access-8p2r5\") pod \"keystone-db-sync-v2dln\" (UID: \"25488b08-5c2b-47d5-a7c1-1c1609bf8dab\") " pod="openstack/keystone-db-sync-v2dln" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.981663 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25488b08-5c2b-47d5-a7c1-1c1609bf8dab-combined-ca-bundle\") pod \"keystone-db-sync-v2dln\" (UID: \"25488b08-5c2b-47d5-a7c1-1c1609bf8dab\") " pod="openstack/keystone-db-sync-v2dln" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.981688 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f29cl\" (UniqueName: \"kubernetes.io/projected/27d97449-bfcc-4c56-8b48-b2f604661b77-kube-api-access-f29cl\") pod \"cinder-3a7f-account-create-update-2b647\" (UID: \"27d97449-bfcc-4c56-8b48-b2f604661b77\") " pod="openstack/cinder-3a7f-account-create-update-2b647" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.981733 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25488b08-5c2b-47d5-a7c1-1c1609bf8dab-config-data\") pod \"keystone-db-sync-v2dln\" (UID: \"25488b08-5c2b-47d5-a7c1-1c1609bf8dab\") " pod="openstack/keystone-db-sync-v2dln" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.981780 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52dcbd14-37d0-437d-882d-c05591461848-operator-scripts\") pod \"neutron-db-create-slrj8\" (UID: \"52dcbd14-37d0-437d-882d-c05591461848\") " pod="openstack/neutron-db-create-slrj8" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.983209 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27d97449-bfcc-4c56-8b48-b2f604661b77-operator-scripts\") pod \"cinder-3a7f-account-create-update-2b647\" (UID: \"27d97449-bfcc-4c56-8b48-b2f604661b77\") " pod="openstack/cinder-3a7f-account-create-update-2b647" Dec 05 01:31:19 crc kubenswrapper[4665]: I1205 01:31:19.988983 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25488b08-5c2b-47d5-a7c1-1c1609bf8dab-config-data\") pod 
\"keystone-db-sync-v2dln\" (UID: \"25488b08-5c2b-47d5-a7c1-1c1609bf8dab\") " pod="openstack/keystone-db-sync-v2dln" Dec 05 01:31:20 crc kubenswrapper[4665]: I1205 01:31:20.007291 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25488b08-5c2b-47d5-a7c1-1c1609bf8dab-combined-ca-bundle\") pod \"keystone-db-sync-v2dln\" (UID: \"25488b08-5c2b-47d5-a7c1-1c1609bf8dab\") " pod="openstack/keystone-db-sync-v2dln" Dec 05 01:31:20 crc kubenswrapper[4665]: I1205 01:31:20.007516 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8p2r5\" (UniqueName: \"kubernetes.io/projected/25488b08-5c2b-47d5-a7c1-1c1609bf8dab-kube-api-access-8p2r5\") pod \"keystone-db-sync-v2dln\" (UID: \"25488b08-5c2b-47d5-a7c1-1c1609bf8dab\") " pod="openstack/keystone-db-sync-v2dln" Dec 05 01:31:20 crc kubenswrapper[4665]: I1205 01:31:20.011987 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f29cl\" (UniqueName: \"kubernetes.io/projected/27d97449-bfcc-4c56-8b48-b2f604661b77-kube-api-access-f29cl\") pod \"cinder-3a7f-account-create-update-2b647\" (UID: \"27d97449-bfcc-4c56-8b48-b2f604661b77\") " pod="openstack/cinder-3a7f-account-create-update-2b647" Dec 05 01:31:20 crc kubenswrapper[4665]: I1205 01:31:20.074739 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-v2dln" Dec 05 01:31:20 crc kubenswrapper[4665]: I1205 01:31:20.083482 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52dcbd14-37d0-437d-882d-c05591461848-operator-scripts\") pod \"neutron-db-create-slrj8\" (UID: \"52dcbd14-37d0-437d-882d-c05591461848\") " pod="openstack/neutron-db-create-slrj8" Dec 05 01:31:20 crc kubenswrapper[4665]: I1205 01:31:20.084123 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52dcbd14-37d0-437d-882d-c05591461848-operator-scripts\") pod \"neutron-db-create-slrj8\" (UID: \"52dcbd14-37d0-437d-882d-c05591461848\") " pod="openstack/neutron-db-create-slrj8" Dec 05 01:31:20 crc kubenswrapper[4665]: I1205 01:31:20.084279 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkzbj\" (UniqueName: \"kubernetes.io/projected/52dcbd14-37d0-437d-882d-c05591461848-kube-api-access-kkzbj\") pod \"neutron-db-create-slrj8\" (UID: \"52dcbd14-37d0-437d-882d-c05591461848\") " pod="openstack/neutron-db-create-slrj8" Dec 05 01:31:20 crc kubenswrapper[4665]: I1205 01:31:20.084595 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rm2xj\" (UniqueName: \"kubernetes.io/projected/99d012f6-fa77-4ff5-8dc6-7e1c48ec7365-kube-api-access-rm2xj\") pod \"neutron-7056-account-create-update-g7mgf\" (UID: \"99d012f6-fa77-4ff5-8dc6-7e1c48ec7365\") " pod="openstack/neutron-7056-account-create-update-g7mgf" Dec 05 01:31:20 crc kubenswrapper[4665]: I1205 01:31:20.084696 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99d012f6-fa77-4ff5-8dc6-7e1c48ec7365-operator-scripts\") pod \"neutron-7056-account-create-update-g7mgf\" (UID: \"99d012f6-fa77-4ff5-8dc6-7e1c48ec7365\") " pod="openstack/neutron-7056-account-create-update-g7mgf" Dec 05 01:31:20 crc kubenswrapper[4665]: I1205 
01:31:20.097802 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-3a7f-account-create-update-2b647"
Dec 05 01:31:20 crc kubenswrapper[4665]: I1205 01:31:20.115108 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkzbj\" (UniqueName: \"kubernetes.io/projected/52dcbd14-37d0-437d-882d-c05591461848-kube-api-access-kkzbj\") pod \"neutron-db-create-slrj8\" (UID: \"52dcbd14-37d0-437d-882d-c05591461848\") " pod="openstack/neutron-db-create-slrj8"
Dec 05 01:31:20 crc kubenswrapper[4665]: I1205 01:31:20.169772 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-slrj8"
Dec 05 01:31:20 crc kubenswrapper[4665]: I1205 01:31:20.186450 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99d012f6-fa77-4ff5-8dc6-7e1c48ec7365-operator-scripts\") pod \"neutron-7056-account-create-update-g7mgf\" (UID: \"99d012f6-fa77-4ff5-8dc6-7e1c48ec7365\") " pod="openstack/neutron-7056-account-create-update-g7mgf"
Dec 05 01:31:20 crc kubenswrapper[4665]: I1205 01:31:20.186602 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rm2xj\" (UniqueName: \"kubernetes.io/projected/99d012f6-fa77-4ff5-8dc6-7e1c48ec7365-kube-api-access-rm2xj\") pod \"neutron-7056-account-create-update-g7mgf\" (UID: \"99d012f6-fa77-4ff5-8dc6-7e1c48ec7365\") " pod="openstack/neutron-7056-account-create-update-g7mgf"
Dec 05 01:31:20 crc kubenswrapper[4665]: I1205 01:31:20.187285 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99d012f6-fa77-4ff5-8dc6-7e1c48ec7365-operator-scripts\") pod \"neutron-7056-account-create-update-g7mgf\" (UID: \"99d012f6-fa77-4ff5-8dc6-7e1c48ec7365\") " pod="openstack/neutron-7056-account-create-update-g7mgf"
Dec 05 01:31:20 crc kubenswrapper[4665]: I1205 01:31:20.207355 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rm2xj\" (UniqueName: \"kubernetes.io/projected/99d012f6-fa77-4ff5-8dc6-7e1c48ec7365-kube-api-access-rm2xj\") pod \"neutron-7056-account-create-update-g7mgf\" (UID: \"99d012f6-fa77-4ff5-8dc6-7e1c48ec7365\") " pod="openstack/neutron-7056-account-create-update-g7mgf"
Dec 05 01:31:20 crc kubenswrapper[4665]: I1205 01:31:20.260672 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7056-account-create-update-g7mgf"
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.168862 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-zlrjx"
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.285711 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-zlrjx" event={"ID":"796fddd5-4127-4632-8728-406e29348c74","Type":"ContainerDied","Data":"c2a8912c4f88296ecce30485414465a624fc085de508c8b30c9ca53d9278a568"}
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.285751 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c2a8912c4f88296ecce30485414465a624fc085de508c8b30c9ca53d9278a568"
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.285755 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-zlrjx"
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.340372 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5wkmd\" (UniqueName: \"kubernetes.io/projected/796fddd5-4127-4632-8728-406e29348c74-kube-api-access-5wkmd\") pod \"796fddd5-4127-4632-8728-406e29348c74\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") "
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.340670 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/796fddd5-4127-4632-8728-406e29348c74-swiftconf\") pod \"796fddd5-4127-4632-8728-406e29348c74\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") "
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.340710 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/796fddd5-4127-4632-8728-406e29348c74-etc-swift\") pod \"796fddd5-4127-4632-8728-406e29348c74\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") "
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.340739 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/796fddd5-4127-4632-8728-406e29348c74-dispersionconf\") pod \"796fddd5-4127-4632-8728-406e29348c74\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") "
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.340763 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/796fddd5-4127-4632-8728-406e29348c74-ring-data-devices\") pod \"796fddd5-4127-4632-8728-406e29348c74\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") "
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.340856 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/796fddd5-4127-4632-8728-406e29348c74-scripts\") pod \"796fddd5-4127-4632-8728-406e29348c74\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") "
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.340935 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/796fddd5-4127-4632-8728-406e29348c74-combined-ca-bundle\") pod \"796fddd5-4127-4632-8728-406e29348c74\" (UID: \"796fddd5-4127-4632-8728-406e29348c74\") "
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.343747 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/796fddd5-4127-4632-8728-406e29348c74-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "796fddd5-4127-4632-8728-406e29348c74" (UID: "796fddd5-4127-4632-8728-406e29348c74"). InnerVolumeSpecName "ring-data-devices". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.344125 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/796fddd5-4127-4632-8728-406e29348c74-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "796fddd5-4127-4632-8728-406e29348c74" (UID: "796fddd5-4127-4632-8728-406e29348c74"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.363286 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/796fddd5-4127-4632-8728-406e29348c74-kube-api-access-5wkmd" (OuterVolumeSpecName: "kube-api-access-5wkmd") pod "796fddd5-4127-4632-8728-406e29348c74" (UID: "796fddd5-4127-4632-8728-406e29348c74"). InnerVolumeSpecName "kube-api-access-5wkmd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.383841 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/796fddd5-4127-4632-8728-406e29348c74-scripts" (OuterVolumeSpecName: "scripts") pod "796fddd5-4127-4632-8728-406e29348c74" (UID: "796fddd5-4127-4632-8728-406e29348c74"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.396268 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/796fddd5-4127-4632-8728-406e29348c74-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "796fddd5-4127-4632-8728-406e29348c74" (UID: "796fddd5-4127-4632-8728-406e29348c74"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.439129 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/796fddd5-4127-4632-8728-406e29348c74-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "796fddd5-4127-4632-8728-406e29348c74" (UID: "796fddd5-4127-4632-8728-406e29348c74"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.442523 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/796fddd5-4127-4632-8728-406e29348c74-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.442549 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/796fddd5-4127-4632-8728-406e29348c74-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.442564 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5wkmd\" (UniqueName: \"kubernetes.io/projected/796fddd5-4127-4632-8728-406e29348c74-kube-api-access-5wkmd\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.442578 4665 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/796fddd5-4127-4632-8728-406e29348c74-etc-swift\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.442588 4665 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/796fddd5-4127-4632-8728-406e29348c74-dispersionconf\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.442601 4665 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/796fddd5-4127-4632-8728-406e29348c74-ring-data-devices\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.449340 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/796fddd5-4127-4632-8728-406e29348c74-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "796fddd5-4127-4632-8728-406e29348c74" (UID: "796fddd5-4127-4632-8728-406e29348c74"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.544783 4665 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/796fddd5-4127-4632-8728-406e29348c74-swiftconf\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.728251 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7056-account-create-update-g7mgf"]
Dec 05 01:31:23 crc kubenswrapper[4665]: W1205 01:31:23.732547 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod99d012f6_fa77_4ff5_8dc6_7e1c48ec7365.slice/crio-44077ae3fdb99e003a9a520f057385ca38fd2795e86e26ff5edfef536d3af1bf WatchSource:0}: Error finding container 44077ae3fdb99e003a9a520f057385ca38fd2795e86e26ff5edfef536d3af1bf: Status 404 returned error can't find the container with id 44077ae3fdb99e003a9a520f057385ca38fd2795e86e26ff5edfef536d3af1bf
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.938772 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-2l7ml"]
Dec 05 01:31:23 crc kubenswrapper[4665]: I1205 01:31:23.965582 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-9x5x9"]
Dec 05 01:31:24 crc kubenswrapper[4665]: W1205 01:31:24.035362 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod003173d7_ba20_4971_b03e_ba2fb5039ff7.slice/crio-868b1005fb8df702b2360c50d9ac9e0830bcc0c865d685f284b09c5276cebb89 WatchSource:0}: Error finding container 868b1005fb8df702b2360c50d9ac9e0830bcc0c865d685f284b09c5276cebb89: Status 404 returned error can't find the container with id 868b1005fb8df702b2360c50d9ac9e0830bcc0c865d685f284b09c5276cebb89
Dec 05 01:31:24 crc kubenswrapper[4665]: I1205 01:31:24.050375 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-3a7f-account-create-update-2b647"]
Dec 05 01:31:24 crc kubenswrapper[4665]: I1205 01:31:24.057348 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-slrj8"]
Dec 05 01:31:24 crc kubenswrapper[4665]: I1205 01:31:24.079535 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-v2dln"]
Dec 05 01:31:24 crc kubenswrapper[4665]: I1205 01:31:24.267582 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-1f10-account-create-update-9c7nb"]
Dec 05 01:31:24 crc kubenswrapper[4665]: W1205 01:31:24.277130 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podff695797_ac78_42f0_9a60_96aa898b80f5.slice/crio-8d71272bd205bd27431d1b0e81f574d6a387cba4f23f142f6fc917a65f635da3 WatchSource:0}: Error finding container 8d71272bd205bd27431d1b0e81f574d6a387cba4f23f142f6fc917a65f635da3: Status 404 returned error can't find the container with id 8d71272bd205bd27431d1b0e81f574d6a387cba4f23f142f6fc917a65f635da3
Dec 05 01:31:24 crc kubenswrapper[4665]: I1205 01:31:24.296426 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-slrj8" event={"ID":"52dcbd14-37d0-437d-882d-c05591461848","Type":"ContainerStarted","Data":"4e89c802226db9e387ce8c4fa47b5857d75c2da955675bbb00ba2f686af184ba"}
Dec 05 01:31:24 crc kubenswrapper[4665]: I1205 01:31:24.297598 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-3a7f-account-create-update-2b647" event={"ID":"27d97449-bfcc-4c56-8b48-b2f604661b77","Type":"ContainerStarted","Data":"48a2c0d4af8f2ecfe618c0f8d1fde0f54a615c1f279338dfa817fb2d0a4e68fa"}
Dec 05 01:31:24 crc kubenswrapper[4665]: I1205 01:31:24.300895 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerStarted","Data":"8528e05d6539c1b4845305ab27b265834c7200bd6a2bd4006fa1a98598856bbe"}
Dec 05 01:31:24 crc kubenswrapper[4665]: I1205 01:31:24.303239 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-2l7ml" event={"ID":"c4310ab2-2a7e-4c02-a58c-f50d4d85882d","Type":"ContainerStarted","Data":"4ea20b7b337765275b07b14cefa3b6e620096f9dfff68a7da27cf673493105d3"}
Dec 05 01:31:24 crc kubenswrapper[4665]: I1205 01:31:24.303285 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-2l7ml" event={"ID":"c4310ab2-2a7e-4c02-a58c-f50d4d85882d","Type":"ContainerStarted","Data":"fc50f03312a6bab6ce4ef1576b030a89aae82b6cc7cbb37fdfe172caaeacf678"}
Dec 05 01:31:24 crc kubenswrapper[4665]: I1205 01:31:24.307785 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-v2dln" event={"ID":"25488b08-5c2b-47d5-a7c1-1c1609bf8dab","Type":"ContainerStarted","Data":"5e3bbb3b717bee94ec441889945a4aa825c58276909cac06aead69ab35295fd2"}
Dec 05 01:31:24 crc kubenswrapper[4665]: I1205 01:31:24.309249 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-1f10-account-create-update-9c7nb" event={"ID":"ff695797-ac78-42f0-9a60-96aa898b80f5","Type":"ContainerStarted","Data":"8d71272bd205bd27431d1b0e81f574d6a387cba4f23f142f6fc917a65f635da3"}
Dec 05 01:31:24 crc kubenswrapper[4665]: I1205 01:31:24.310437 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-9x5x9" event={"ID":"003173d7-ba20-4971-b03e-ba2fb5039ff7","Type":"ContainerStarted","Data":"868b1005fb8df702b2360c50d9ac9e0830bcc0c865d685f284b09c5276cebb89"}
Dec 05 01:31:24 crc kubenswrapper[4665]: I1205 01:31:24.312153 4665 generic.go:334] "Generic (PLEG): container finished" podID="99d012f6-fa77-4ff5-8dc6-7e1c48ec7365" containerID="22afa371b02fddf8b83bfd2c688c8d7e334603b2179d1bf856ccc6e404dab94d" exitCode=0
Dec 05 01:31:24 crc kubenswrapper[4665]: I1205 01:31:24.312215 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7056-account-create-update-g7mgf" event={"ID":"99d012f6-fa77-4ff5-8dc6-7e1c48ec7365","Type":"ContainerDied","Data":"22afa371b02fddf8b83bfd2c688c8d7e334603b2179d1bf856ccc6e404dab94d"}
Dec 05 01:31:24 crc kubenswrapper[4665]: I1205 01:31:24.312256 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7056-account-create-update-g7mgf" event={"ID":"99d012f6-fa77-4ff5-8dc6-7e1c48ec7365","Type":"ContainerStarted","Data":"44077ae3fdb99e003a9a520f057385ca38fd2795e86e26ff5edfef536d3af1bf"}
Dec 05 01:31:24 crc kubenswrapper[4665]: I1205 01:31:24.333587 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-2l7ml" podStartSLOduration=5.333568923 podStartE2EDuration="5.333568923s" podCreationTimestamp="2025-12-05 01:31:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:31:24.331339619 +0000 UTC m=+1259.670731928" watchObservedRunningTime="2025-12-05 01:31:24.333568923 +0000 UTC m=+1259.672961222"
Dec 05 01:31:25 crc kubenswrapper[4665]: I1205 01:31:25.326957 4665 generic.go:334] "Generic (PLEG): container finished" podID="ff695797-ac78-42f0-9a60-96aa898b80f5" containerID="9a047dd4d1adad72c9ea3845955449ec2eaf74cb9e85f74aa754a7424476eb67" exitCode=0
Dec 05 01:31:25 crc kubenswrapper[4665]: I1205 01:31:25.327586 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-1f10-account-create-update-9c7nb" event={"ID":"ff695797-ac78-42f0-9a60-96aa898b80f5","Type":"ContainerDied","Data":"9a047dd4d1adad72c9ea3845955449ec2eaf74cb9e85f74aa754a7424476eb67"}
Dec 05 01:31:25 crc kubenswrapper[4665]: I1205 01:31:25.336706 4665 generic.go:334] "Generic (PLEG): container finished" podID="003173d7-ba20-4971-b03e-ba2fb5039ff7" containerID="1cfe9fc27e6fd8079a155b696bf1d3926da2eb4e68f054b07d29e1e50a18c8e5" exitCode=0
Dec 05 01:31:25 crc kubenswrapper[4665]: I1205 01:31:25.336799 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-9x5x9" event={"ID":"003173d7-ba20-4971-b03e-ba2fb5039ff7","Type":"ContainerDied","Data":"1cfe9fc27e6fd8079a155b696bf1d3926da2eb4e68f054b07d29e1e50a18c8e5"}
Dec 05 01:31:25 crc kubenswrapper[4665]: I1205 01:31:25.344096 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-dd2gb" event={"ID":"271549ee-1768-4a76-bbc9-d931689a6ad9","Type":"ContainerStarted","Data":"1dff241f5ff1e4ad5c49917b0ce39d1d31a24a35e0777c4d7a9f4307f3bb9b39"}
Dec 05 01:31:25 crc kubenswrapper[4665]: I1205 01:31:25.355090 4665 generic.go:334] "Generic (PLEG): container finished" podID="52dcbd14-37d0-437d-882d-c05591461848" containerID="a03593562853dc2ab8ebd888203016bf6dcba76b9925d09c412a7c386554450d" exitCode=0
Dec 05 01:31:25 crc kubenswrapper[4665]: I1205 01:31:25.355183 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-slrj8" event={"ID":"52dcbd14-37d0-437d-882d-c05591461848","Type":"ContainerDied","Data":"a03593562853dc2ab8ebd888203016bf6dcba76b9925d09c412a7c386554450d"}
Dec 05 01:31:25 crc kubenswrapper[4665]: I1205 01:31:25.365532 4665 generic.go:334] "Generic (PLEG): container finished" podID="27d97449-bfcc-4c56-8b48-b2f604661b77" containerID="3bfe5c309623ea488f44ab0c9a2f228dffff2035ed5b5589c330cbbff2241f9d" exitCode=0
Dec 05 01:31:25 crc kubenswrapper[4665]: I1205 01:31:25.365589 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-3a7f-account-create-update-2b647" event={"ID":"27d97449-bfcc-4c56-8b48-b2f604661b77","Type":"ContainerDied","Data":"3bfe5c309623ea488f44ab0c9a2f228dffff2035ed5b5589c330cbbff2241f9d"}
Dec 05 01:31:25 crc kubenswrapper[4665]: I1205 01:31:25.366914 4665 generic.go:334] "Generic (PLEG): container finished" podID="c4310ab2-2a7e-4c02-a58c-f50d4d85882d" containerID="4ea20b7b337765275b07b14cefa3b6e620096f9dfff68a7da27cf673493105d3" exitCode=0
Dec 05 01:31:25 crc kubenswrapper[4665]: I1205 01:31:25.367646 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-2l7ml" event={"ID":"c4310ab2-2a7e-4c02-a58c-f50d4d85882d","Type":"ContainerDied","Data":"4ea20b7b337765275b07b14cefa3b6e620096f9dfff68a7da27cf673493105d3"}
Dec 05 01:31:25 crc kubenswrapper[4665]: I1205 01:31:25.454055 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-dd2gb" podStartSLOduration=3.422278663 podStartE2EDuration="18.454038389s" podCreationTimestamp="2025-12-05 01:31:07 +0000 UTC" firstStartedPulling="2025-12-05 01:31:08.300240123 +0000 UTC m=+1243.639632412" lastFinishedPulling="2025-12-05 01:31:23.331999829 +0000 UTC m=+1258.671392138" observedRunningTime="2025-12-05 01:31:25.45284467 +0000 UTC m=+1260.792236969" watchObservedRunningTime="2025-12-05 01:31:25.454038389 +0000 UTC m=+1260.793430688"
Dec 05 01:31:25 crc kubenswrapper[4665]: I1205 01:31:25.787183 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7056-account-create-update-g7mgf"
Dec 05 01:31:25 crc kubenswrapper[4665]: I1205 01:31:25.891392 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rm2xj\" (UniqueName: \"kubernetes.io/projected/99d012f6-fa77-4ff5-8dc6-7e1c48ec7365-kube-api-access-rm2xj\") pod \"99d012f6-fa77-4ff5-8dc6-7e1c48ec7365\" (UID: \"99d012f6-fa77-4ff5-8dc6-7e1c48ec7365\") "
Dec 05 01:31:25 crc kubenswrapper[4665]: I1205 01:31:25.891472 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99d012f6-fa77-4ff5-8dc6-7e1c48ec7365-operator-scripts\") pod \"99d012f6-fa77-4ff5-8dc6-7e1c48ec7365\" (UID: \"99d012f6-fa77-4ff5-8dc6-7e1c48ec7365\") "
Dec 05 01:31:25 crc kubenswrapper[4665]: I1205 01:31:25.892358 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/99d012f6-fa77-4ff5-8dc6-7e1c48ec7365-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "99d012f6-fa77-4ff5-8dc6-7e1c48ec7365" (UID: "99d012f6-fa77-4ff5-8dc6-7e1c48ec7365"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:31:25 crc kubenswrapper[4665]: I1205 01:31:25.896467 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99d012f6-fa77-4ff5-8dc6-7e1c48ec7365-kube-api-access-rm2xj" (OuterVolumeSpecName: "kube-api-access-rm2xj") pod "99d012f6-fa77-4ff5-8dc6-7e1c48ec7365" (UID: "99d012f6-fa77-4ff5-8dc6-7e1c48ec7365"). InnerVolumeSpecName "kube-api-access-rm2xj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:31:25 crc kubenswrapper[4665]: I1205 01:31:25.994135 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rm2xj\" (UniqueName: \"kubernetes.io/projected/99d012f6-fa77-4ff5-8dc6-7e1c48ec7365-kube-api-access-rm2xj\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:25 crc kubenswrapper[4665]: I1205 01:31:25.994192 4665 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/99d012f6-fa77-4ff5-8dc6-7e1c48ec7365-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:26 crc kubenswrapper[4665]: I1205 01:31:26.379287 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7056-account-create-update-g7mgf" event={"ID":"99d012f6-fa77-4ff5-8dc6-7e1c48ec7365","Type":"ContainerDied","Data":"44077ae3fdb99e003a9a520f057385ca38fd2795e86e26ff5edfef536d3af1bf"}
Dec 05 01:31:26 crc kubenswrapper[4665]: I1205 01:31:26.379441 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="44077ae3fdb99e003a9a520f057385ca38fd2795e86e26ff5edfef536d3af1bf"
Dec 05 01:31:26 crc kubenswrapper[4665]: I1205 01:31:26.379460 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7056-account-create-update-g7mgf"
Dec 05 01:31:27 crc kubenswrapper[4665]: I1205 01:31:27.532273 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-etc-swift\") pod \"swift-storage-0\" (UID: \"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f\") " pod="openstack/swift-storage-0"
Dec 05 01:31:27 crc kubenswrapper[4665]: I1205 01:31:27.544565 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f-etc-swift\") pod \"swift-storage-0\" (UID: \"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f\") " pod="openstack/swift-storage-0"
Dec 05 01:31:27 crc kubenswrapper[4665]: I1205 01:31:27.615798 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0"
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.091788 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0"
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.099509 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-slrj8"
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.129660 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-1f10-account-create-update-9c7nb"
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.158507 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ff695797-ac78-42f0-9a60-96aa898b80f5-operator-scripts\") pod \"ff695797-ac78-42f0-9a60-96aa898b80f5\" (UID: \"ff695797-ac78-42f0-9a60-96aa898b80f5\") "
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.158560 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kkzbj\" (UniqueName: \"kubernetes.io/projected/52dcbd14-37d0-437d-882d-c05591461848-kube-api-access-kkzbj\") pod \"52dcbd14-37d0-437d-882d-c05591461848\" (UID: \"52dcbd14-37d0-437d-882d-c05591461848\") "
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.158592 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52dcbd14-37d0-437d-882d-c05591461848-operator-scripts\") pod \"52dcbd14-37d0-437d-882d-c05591461848\" (UID: \"52dcbd14-37d0-437d-882d-c05591461848\") "
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.158722 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vhvv7\" (UniqueName: \"kubernetes.io/projected/ff695797-ac78-42f0-9a60-96aa898b80f5-kube-api-access-vhvv7\") pod \"ff695797-ac78-42f0-9a60-96aa898b80f5\" (UID: \"ff695797-ac78-42f0-9a60-96aa898b80f5\") "
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.159766 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff695797-ac78-42f0-9a60-96aa898b80f5-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ff695797-ac78-42f0-9a60-96aa898b80f5" (UID: "ff695797-ac78-42f0-9a60-96aa898b80f5"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.160447 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/52dcbd14-37d0-437d-882d-c05591461848-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "52dcbd14-37d0-437d-882d-c05591461848" (UID: "52dcbd14-37d0-437d-882d-c05591461848"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.167353 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/52dcbd14-37d0-437d-882d-c05591461848-kube-api-access-kkzbj" (OuterVolumeSpecName: "kube-api-access-kkzbj") pod "52dcbd14-37d0-437d-882d-c05591461848" (UID: "52dcbd14-37d0-437d-882d-c05591461848"). InnerVolumeSpecName "kube-api-access-kkzbj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.178526 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff695797-ac78-42f0-9a60-96aa898b80f5-kube-api-access-vhvv7" (OuterVolumeSpecName: "kube-api-access-vhvv7") pod "ff695797-ac78-42f0-9a60-96aa898b80f5" (UID: "ff695797-ac78-42f0-9a60-96aa898b80f5"). InnerVolumeSpecName "kube-api-access-vhvv7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.212768 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-2l7ml"
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.233355 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-3a7f-account-create-update-2b647"
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.251282 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-9x5x9"
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.260562 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-khtt5\" (UniqueName: \"kubernetes.io/projected/c4310ab2-2a7e-4c02-a58c-f50d4d85882d-kube-api-access-khtt5\") pod \"c4310ab2-2a7e-4c02-a58c-f50d4d85882d\" (UID: \"c4310ab2-2a7e-4c02-a58c-f50d4d85882d\") "
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.260662 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4310ab2-2a7e-4c02-a58c-f50d4d85882d-operator-scripts\") pod \"c4310ab2-2a7e-4c02-a58c-f50d4d85882d\" (UID: \"c4310ab2-2a7e-4c02-a58c-f50d4d85882d\") "
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.261696 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vhvv7\" (UniqueName: \"kubernetes.io/projected/ff695797-ac78-42f0-9a60-96aa898b80f5-kube-api-access-vhvv7\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.261716 4665 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ff695797-ac78-42f0-9a60-96aa898b80f5-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.261725 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kkzbj\" (UniqueName: \"kubernetes.io/projected/52dcbd14-37d0-437d-882d-c05591461848-kube-api-access-kkzbj\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.261734 4665 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/52dcbd14-37d0-437d-882d-c05591461848-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.262264 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c4310ab2-2a7e-4c02-a58c-f50d4d85882d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c4310ab2-2a7e-4c02-a58c-f50d4d85882d" (UID: "c4310ab2-2a7e-4c02-a58c-f50d4d85882d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.270461 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c4310ab2-2a7e-4c02-a58c-f50d4d85882d-kube-api-access-khtt5" (OuterVolumeSpecName: "kube-api-access-khtt5") pod "c4310ab2-2a7e-4c02-a58c-f50d4d85882d" (UID: "c4310ab2-2a7e-4c02-a58c-f50d4d85882d"). InnerVolumeSpecName "kube-api-access-khtt5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.362687 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8s2hj\" (UniqueName: \"kubernetes.io/projected/003173d7-ba20-4971-b03e-ba2fb5039ff7-kube-api-access-8s2hj\") pod \"003173d7-ba20-4971-b03e-ba2fb5039ff7\" (UID: \"003173d7-ba20-4971-b03e-ba2fb5039ff7\") "
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.362859 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f29cl\" (UniqueName: \"kubernetes.io/projected/27d97449-bfcc-4c56-8b48-b2f604661b77-kube-api-access-f29cl\") pod \"27d97449-bfcc-4c56-8b48-b2f604661b77\" (UID: \"27d97449-bfcc-4c56-8b48-b2f604661b77\") "
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.362887 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/003173d7-ba20-4971-b03e-ba2fb5039ff7-operator-scripts\") pod \"003173d7-ba20-4971-b03e-ba2fb5039ff7\" (UID: \"003173d7-ba20-4971-b03e-ba2fb5039ff7\") "
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.362994 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27d97449-bfcc-4c56-8b48-b2f604661b77-operator-scripts\") pod \"27d97449-bfcc-4c56-8b48-b2f604661b77\" (UID: \"27d97449-bfcc-4c56-8b48-b2f604661b77\") "
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.363368 4665 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c4310ab2-2a7e-4c02-a58c-f50d4d85882d-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.363386 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-khtt5\" (UniqueName: \"kubernetes.io/projected/c4310ab2-2a7e-4c02-a58c-f50d4d85882d-kube-api-access-khtt5\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.364110 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27d97449-bfcc-4c56-8b48-b2f604661b77-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "27d97449-bfcc-4c56-8b48-b2f604661b77" (UID: "27d97449-bfcc-4c56-8b48-b2f604661b77"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.371086 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/003173d7-ba20-4971-b03e-ba2fb5039ff7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "003173d7-ba20-4971-b03e-ba2fb5039ff7" (UID: "003173d7-ba20-4971-b03e-ba2fb5039ff7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.371586 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27d97449-bfcc-4c56-8b48-b2f604661b77-kube-api-access-f29cl" (OuterVolumeSpecName: "kube-api-access-f29cl") pod "27d97449-bfcc-4c56-8b48-b2f604661b77" (UID: "27d97449-bfcc-4c56-8b48-b2f604661b77"). InnerVolumeSpecName "kube-api-access-f29cl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.371892 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/003173d7-ba20-4971-b03e-ba2fb5039ff7-kube-api-access-8s2hj" (OuterVolumeSpecName: "kube-api-access-8s2hj") pod "003173d7-ba20-4971-b03e-ba2fb5039ff7" (UID: "003173d7-ba20-4971-b03e-ba2fb5039ff7"). InnerVolumeSpecName "kube-api-access-8s2hj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.417061 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-9x5x9" event={"ID":"003173d7-ba20-4971-b03e-ba2fb5039ff7","Type":"ContainerDied","Data":"868b1005fb8df702b2360c50d9ac9e0830bcc0c865d685f284b09c5276cebb89"}
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.417097 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="868b1005fb8df702b2360c50d9ac9e0830bcc0c865d685f284b09c5276cebb89"
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.417156 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-9x5x9"
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.438059 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-slrj8" event={"ID":"52dcbd14-37d0-437d-882d-c05591461848","Type":"ContainerDied","Data":"4e89c802226db9e387ce8c4fa47b5857d75c2da955675bbb00ba2f686af184ba"}
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.438100 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4e89c802226db9e387ce8c4fa47b5857d75c2da955675bbb00ba2f686af184ba"
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.438213 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-slrj8"
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.443344 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-3a7f-account-create-update-2b647" event={"ID":"27d97449-bfcc-4c56-8b48-b2f604661b77","Type":"ContainerDied","Data":"48a2c0d4af8f2ecfe618c0f8d1fde0f54a615c1f279338dfa817fb2d0a4e68fa"}
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.443384 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="48a2c0d4af8f2ecfe618c0f8d1fde0f54a615c1f279338dfa817fb2d0a4e68fa"
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.443445 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-3a7f-account-create-update-2b647"
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.457846 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-2l7ml" event={"ID":"c4310ab2-2a7e-4c02-a58c-f50d4d85882d","Type":"ContainerDied","Data":"fc50f03312a6bab6ce4ef1576b030a89aae82b6cc7cbb37fdfe172caaeacf678"}
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.457890 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fc50f03312a6bab6ce4ef1576b030a89aae82b6cc7cbb37fdfe172caaeacf678"
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.457965 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-2l7ml"
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.474781 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f29cl\" (UniqueName: \"kubernetes.io/projected/27d97449-bfcc-4c56-8b48-b2f604661b77-kube-api-access-f29cl\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.474813 4665 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/003173d7-ba20-4971-b03e-ba2fb5039ff7-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.474823 4665 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27d97449-bfcc-4c56-8b48-b2f604661b77-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.474834 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8s2hj\" (UniqueName: \"kubernetes.io/projected/003173d7-ba20-4971-b03e-ba2fb5039ff7-kube-api-access-8s2hj\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.489463 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-v2dln" event={"ID":"25488b08-5c2b-47d5-a7c1-1c1609bf8dab","Type":"ContainerStarted","Data":"84a93caaad018815bec32398a8c470b321fb95d76ff964205b4b18113dee3dcc"}
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.492960 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-1f10-account-create-update-9c7nb" event={"ID":"ff695797-ac78-42f0-9a60-96aa898b80f5","Type":"ContainerDied","Data":"8d71272bd205bd27431d1b0e81f574d6a387cba4f23f142f6fc917a65f635da3"}
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.492992 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d71272bd205bd27431d1b0e81f574d6a387cba4f23f142f6fc917a65f635da3"
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.493073 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-1f10-account-create-update-9c7nb"
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.542744 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-v2dln" podStartSLOduration=5.677371731 podStartE2EDuration="10.542721579s" podCreationTimestamp="2025-12-05 01:31:19 +0000 UTC" firstStartedPulling="2025-12-05 01:31:24.151014311 +0000 UTC m=+1259.490406610" lastFinishedPulling="2025-12-05 01:31:29.016364149 +0000 UTC m=+1264.355756458" observedRunningTime="2025-12-05 01:31:29.532080484 +0000 UTC m=+1264.871472783" watchObservedRunningTime="2025-12-05 01:31:29.542721579 +0000 UTC m=+1264.882113878"
Dec 05 01:31:29 crc kubenswrapper[4665]: I1205 01:31:29.654013 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"]
Dec 05 01:31:29 crc kubenswrapper[4665]: W1205 01:31:29.655354 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd2bb1abb_06b0_416c_94fb_6afcbb0c8c3f.slice/crio-2094fa61f80d91652b0ff8d077002c3941a3e5ee6c2c90caa22bc683e1cb0791 WatchSource:0}: Error finding container 2094fa61f80d91652b0ff8d077002c3941a3e5ee6c2c90caa22bc683e1cb0791: Status 404 returned error can't find the container with id 2094fa61f80d91652b0ff8d077002c3941a3e5ee6c2c90caa22bc683e1cb0791
Dec 05 01:31:30 crc kubenswrapper[4665]: I1205 01:31:30.502457 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f","Type":"ContainerStarted","Data":"2094fa61f80d91652b0ff8d077002c3941a3e5ee6c2c90caa22bc683e1cb0791"}
Dec 05 01:31:32 crc kubenswrapper[4665]: I1205 01:31:32.520428 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f","Type":"ContainerStarted","Data":"3f703a336e212bc53d163bd6ab787857d0416e395c13d0a11db69568268714c8"}
Dec 05 01:31:32 crc kubenswrapper[4665]: I1205 01:31:32.520980 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f","Type":"ContainerStarted","Data":"3fca16497c532a9a6df6a8a75675c9d93e013d338df35c50fcc818d46c65b75f"}
Dec 05 01:31:32 crc kubenswrapper[4665]: I1205 01:31:32.520991 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f","Type":"ContainerStarted","Data":"81a94f4e6cb4b3f90e1370211718b8b31729fe4294cce8f4bfcf0dbbe4d2fe12"}
Dec 05 01:31:32 crc kubenswrapper[4665]: I1205 01:31:32.520999 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f","Type":"ContainerStarted","Data":"da990c9c3ed4a1432975cc658b288e15334edb948a66e64f21ab9f4cdfc233cf"}
Dec 05 01:31:35 crc kubenswrapper[4665]: I1205 01:31:35.545665 4665 generic.go:334] "Generic (PLEG): container finished" podID="25488b08-5c2b-47d5-a7c1-1c1609bf8dab" containerID="84a93caaad018815bec32398a8c470b321fb95d76ff964205b4b18113dee3dcc" exitCode=0
Dec 05 01:31:35 crc kubenswrapper[4665]: I1205 01:31:35.545750 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-v2dln" event={"ID":"25488b08-5c2b-47d5-a7c1-1c1609bf8dab","Type":"ContainerDied","Data":"84a93caaad018815bec32398a8c470b321fb95d76ff964205b4b18113dee3dcc"}
Dec 05 01:31:35 crc kubenswrapper[4665]: I1205 01:31:35.555062 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f","Type":"ContainerStarted","Data":"3d1348bcfb5f9fb7f1da754bb305e13fcd4a4107f768103343c7a88cb7f35981"}
Dec 05 01:31:35 crc kubenswrapper[4665]: I1205 01:31:35.555111 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f","Type":"ContainerStarted","Data":"ec392de7c79b28c4df4bae970c589efaae45e79e3add63e4942d7c73dc9c8dc0"}
Dec 05 01:31:35 crc kubenswrapper[4665]: I1205 01:31:35.555123 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f","Type":"ContainerStarted","Data":"28d4c85b07abcae4835a5aa30c397854e4ec369b5a47ca8f4c7080c0b6ea62d3"}
Dec 05 01:31:35 crc kubenswrapper[4665]: I1205 01:31:35.555135 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f","Type":"ContainerStarted","Data":"7a2de9e10bd5d563fa731a2ccbba522fe166e1a221c68e39e7e03a25f659a41e"}
Dec 05 01:31:35 crc kubenswrapper[4665]: I1205 01:31:35.558870 4665 generic.go:334] "Generic (PLEG): container finished" podID="271549ee-1768-4a76-bbc9-d931689a6ad9" containerID="1dff241f5ff1e4ad5c49917b0ce39d1d31a24a35e0777c4d7a9f4307f3bb9b39" exitCode=0
Dec 05 01:31:35 crc kubenswrapper[4665]: I1205 01:31:35.558922 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-dd2gb" event={"ID":"271549ee-1768-4a76-bbc9-d931689a6ad9","Type":"ContainerDied","Data":"1dff241f5ff1e4ad5c49917b0ce39d1d31a24a35e0777c4d7a9f4307f3bb9b39"}
Dec 05 01:31:36 crc kubenswrapper[4665]: I1205 01:31:36.584582 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f","Type":"ContainerStarted","Data":"82e57ce1e191be2e02f5e7380631882e53b68c914e11d31c45dc608795c10396"}
Dec 05 01:31:36 crc kubenswrapper[4665]: I1205 01:31:36.923483 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-v2dln"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.012369 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25488b08-5c2b-47d5-a7c1-1c1609bf8dab-config-data\") pod \"25488b08-5c2b-47d5-a7c1-1c1609bf8dab\" (UID: \"25488b08-5c2b-47d5-a7c1-1c1609bf8dab\") "
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.012821 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25488b08-5c2b-47d5-a7c1-1c1609bf8dab-combined-ca-bundle\") pod \"25488b08-5c2b-47d5-a7c1-1c1609bf8dab\" (UID: \"25488b08-5c2b-47d5-a7c1-1c1609bf8dab\") "
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.013066 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8p2r5\" (UniqueName: \"kubernetes.io/projected/25488b08-5c2b-47d5-a7c1-1c1609bf8dab-kube-api-access-8p2r5\") pod \"25488b08-5c2b-47d5-a7c1-1c1609bf8dab\" (UID: \"25488b08-5c2b-47d5-a7c1-1c1609bf8dab\") "
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.047841 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25488b08-5c2b-47d5-a7c1-1c1609bf8dab-kube-api-access-8p2r5" (OuterVolumeSpecName: "kube-api-access-8p2r5") pod "25488b08-5c2b-47d5-a7c1-1c1609bf8dab" (UID: "25488b08-5c2b-47d5-a7c1-1c1609bf8dab"). InnerVolumeSpecName "kube-api-access-8p2r5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.070176 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25488b08-5c2b-47d5-a7c1-1c1609bf8dab-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "25488b08-5c2b-47d5-a7c1-1c1609bf8dab" (UID: "25488b08-5c2b-47d5-a7c1-1c1609bf8dab"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.088561 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25488b08-5c2b-47d5-a7c1-1c1609bf8dab-config-data" (OuterVolumeSpecName: "config-data") pod "25488b08-5c2b-47d5-a7c1-1c1609bf8dab" (UID: "25488b08-5c2b-47d5-a7c1-1c1609bf8dab"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.106739 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-dd2gb"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.123786 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/25488b08-5c2b-47d5-a7c1-1c1609bf8dab-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.123814 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/25488b08-5c2b-47d5-a7c1-1c1609bf8dab-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.123825 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8p2r5\" (UniqueName: \"kubernetes.io/projected/25488b08-5c2b-47d5-a7c1-1c1609bf8dab-kube-api-access-8p2r5\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.224689 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/271549ee-1768-4a76-bbc9-d931689a6ad9-db-sync-config-data\") pod \"271549ee-1768-4a76-bbc9-d931689a6ad9\" (UID: \"271549ee-1768-4a76-bbc9-d931689a6ad9\") "
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.224737 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/271549ee-1768-4a76-bbc9-d931689a6ad9-config-data\") pod \"271549ee-1768-4a76-bbc9-d931689a6ad9\" (UID: \"271549ee-1768-4a76-bbc9-d931689a6ad9\") "
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.224866 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sk9hf\" (UniqueName: \"kubernetes.io/projected/271549ee-1768-4a76-bbc9-d931689a6ad9-kube-api-access-sk9hf\") pod \"271549ee-1768-4a76-bbc9-d931689a6ad9\" (UID: \"271549ee-1768-4a76-bbc9-d931689a6ad9\") "
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.224953 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/271549ee-1768-4a76-bbc9-d931689a6ad9-combined-ca-bundle\") pod \"271549ee-1768-4a76-bbc9-d931689a6ad9\" (UID: \"271549ee-1768-4a76-bbc9-d931689a6ad9\") "
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.228468 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/271549ee-1768-4a76-bbc9-d931689a6ad9-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "271549ee-1768-4a76-bbc9-d931689a6ad9" (UID: "271549ee-1768-4a76-bbc9-d931689a6ad9"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.228753 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/271549ee-1768-4a76-bbc9-d931689a6ad9-kube-api-access-sk9hf" (OuterVolumeSpecName: "kube-api-access-sk9hf") pod "271549ee-1768-4a76-bbc9-d931689a6ad9" (UID: "271549ee-1768-4a76-bbc9-d931689a6ad9"). InnerVolumeSpecName "kube-api-access-sk9hf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.255427 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/271549ee-1768-4a76-bbc9-d931689a6ad9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "271549ee-1768-4a76-bbc9-d931689a6ad9" (UID: "271549ee-1768-4a76-bbc9-d931689a6ad9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.272065 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/271549ee-1768-4a76-bbc9-d931689a6ad9-config-data" (OuterVolumeSpecName: "config-data") pod "271549ee-1768-4a76-bbc9-d931689a6ad9" (UID: "271549ee-1768-4a76-bbc9-d931689a6ad9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.326493 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sk9hf\" (UniqueName: \"kubernetes.io/projected/271549ee-1768-4a76-bbc9-d931689a6ad9-kube-api-access-sk9hf\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.326519 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/271549ee-1768-4a76-bbc9-d931689a6ad9-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.326528 4665 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/271549ee-1768-4a76-bbc9-d931689a6ad9-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.326537 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/271549ee-1768-4a76-bbc9-d931689a6ad9-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.606401 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-v2dln" event={"ID":"25488b08-5c2b-47d5-a7c1-1c1609bf8dab","Type":"ContainerDied","Data":"5e3bbb3b717bee94ec441889945a4aa825c58276909cac06aead69ab35295fd2"}
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.606704 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5e3bbb3b717bee94ec441889945a4aa825c58276909cac06aead69ab35295fd2"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.606565 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-v2dln"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.628849 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f","Type":"ContainerStarted","Data":"fb869c818d9313ca6a770a71d6b7cfa5d468e575974ee177cace1bcbcf79addd"}
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.628887 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f","Type":"ContainerStarted","Data":"5318fd5f95d992608bbfc8d723221d42d2a1d049956099259128e9e6d81b1c00"}
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.628896 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f","Type":"ContainerStarted","Data":"a53f2fbb881352cf9c87178f322a50d313f3b9a68c510841dd957ffa053195f5"}
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.628906 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f","Type":"ContainerStarted","Data":"b5a9b450a18abbefe786ff720463ad1577e6632b8d4b647093fa7dbcbe88fc70"}
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.630351 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-dd2gb" event={"ID":"271549ee-1768-4a76-bbc9-d931689a6ad9","Type":"ContainerDied","Data":"883885eef4f01fe2706eddfb7bf0a155f4910fc13d04ad51a8636a6c51773694"}
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.630374 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="883885eef4f01fe2706eddfb7bf0a155f4910fc13d04ad51a8636a6c51773694"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.630425 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-dd2gb"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.881859 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-l6gbw"]
Dec 05 01:31:37 crc kubenswrapper[4665]: E1205 01:31:37.882247 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c4310ab2-2a7e-4c02-a58c-f50d4d85882d" containerName="mariadb-database-create"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.882269 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="c4310ab2-2a7e-4c02-a58c-f50d4d85882d" containerName="mariadb-database-create"
Dec 05 01:31:37 crc kubenswrapper[4665]: E1205 01:31:37.882289 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27d97449-bfcc-4c56-8b48-b2f604661b77" containerName="mariadb-account-create-update"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.882322 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="27d97449-bfcc-4c56-8b48-b2f604661b77" containerName="mariadb-account-create-update"
Dec 05 01:31:37 crc kubenswrapper[4665]: E1205 01:31:37.882337 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="52dcbd14-37d0-437d-882d-c05591461848" containerName="mariadb-database-create"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.882345 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="52dcbd14-37d0-437d-882d-c05591461848" containerName="mariadb-database-create"
Dec 05 01:31:37 crc kubenswrapper[4665]: E1205 01:31:37.882357 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25488b08-5c2b-47d5-a7c1-1c1609bf8dab" containerName="keystone-db-sync"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.882364 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="25488b08-5c2b-47d5-a7c1-1c1609bf8dab" containerName="keystone-db-sync"
Dec 05 01:31:37 crc kubenswrapper[4665]: E1205 01:31:37.882378 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="003173d7-ba20-4971-b03e-ba2fb5039ff7" containerName="mariadb-database-create"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.882386 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="003173d7-ba20-4971-b03e-ba2fb5039ff7" containerName="mariadb-database-create"
Dec 05 01:31:37 crc kubenswrapper[4665]: E1205 01:31:37.882410 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99d012f6-fa77-4ff5-8dc6-7e1c48ec7365" containerName="mariadb-account-create-update"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.882419 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="99d012f6-fa77-4ff5-8dc6-7e1c48ec7365" containerName="mariadb-account-create-update"
Dec 05 01:31:37 crc kubenswrapper[4665]: E1205 01:31:37.882440 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="796fddd5-4127-4632-8728-406e29348c74" containerName="swift-ring-rebalance"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.882447 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="796fddd5-4127-4632-8728-406e29348c74" containerName="swift-ring-rebalance"
Dec 05 01:31:37 crc kubenswrapper[4665]: E1205 01:31:37.882455 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="271549ee-1768-4a76-bbc9-d931689a6ad9" containerName="glance-db-sync"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.882462 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="271549ee-1768-4a76-bbc9-d931689a6ad9" containerName="glance-db-sync"
Dec 05 01:31:37 crc kubenswrapper[4665]: E1205 01:31:37.882475 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff695797-ac78-42f0-9a60-96aa898b80f5" containerName="mariadb-account-create-update"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.882484 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff695797-ac78-42f0-9a60-96aa898b80f5" containerName="mariadb-account-create-update"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.882671 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="25488b08-5c2b-47d5-a7c1-1c1609bf8dab" containerName="keystone-db-sync"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.882688 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="52dcbd14-37d0-437d-882d-c05591461848" containerName="mariadb-database-create"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.882710 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="27d97449-bfcc-4c56-8b48-b2f604661b77" containerName="mariadb-account-create-update"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.882724 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff695797-ac78-42f0-9a60-96aa898b80f5" containerName="mariadb-account-create-update"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.882740 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="271549ee-1768-4a76-bbc9-d931689a6ad9" containerName="glance-db-sync"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.882755 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="99d012f6-fa77-4ff5-8dc6-7e1c48ec7365" containerName="mariadb-account-create-update"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.882769 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="796fddd5-4127-4632-8728-406e29348c74" containerName="swift-ring-rebalance"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.882801 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="c4310ab2-2a7e-4c02-a58c-f50d4d85882d" containerName="mariadb-database-create"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.882818 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="003173d7-ba20-4971-b03e-ba2fb5039ff7" containerName="mariadb-database-create"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.883887 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f877ddd87-l6gbw"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.909072 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-l6gbw"]
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.968186 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-dqbqb"]
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.969556 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dqbqb"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.981353 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.990192 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-6ssd4"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.990455 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.990616 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Dec 05 01:31:37 crc kubenswrapper[4665]: I1205 01:31:37.990631 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.023570 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-dqbqb"]
Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.041387 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-credential-keys\") pod \"keystone-bootstrap-dqbqb\" (UID: \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\") " pod="openstack/keystone-bootstrap-dqbqb"
Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.041470 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-scripts\") pod \"keystone-bootstrap-dqbqb\" (UID: \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\") " pod="openstack/keystone-bootstrap-dqbqb"
Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.041498 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-ovsdbserver-sb\") pod \"dnsmasq-dns-f877ddd87-l6gbw\" (UID: \"1b2568dd-1397-4d8a-90af-a5b8e81f58a3\") " pod="openstack/dnsmasq-dns-f877ddd87-l6gbw"
Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.041542 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnjhd\" (UniqueName: \"kubernetes.io/projected/2c7e82db-d9a2-4e59-8613-e58e9abfce55-kube-api-access-jnjhd\") pod \"keystone-bootstrap-dqbqb\" (UID: \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\") " pod="openstack/keystone-bootstrap-dqbqb"
Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.041631 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-combined-ca-bundle\") pod \"keystone-bootstrap-dqbqb\" (UID: \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\") " pod="openstack/keystone-bootstrap-dqbqb"
Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.041667 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-config-data\") pod \"keystone-bootstrap-dqbqb\" (UID: \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\") " pod="openstack/keystone-bootstrap-dqbqb"
Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.041706 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-fernet-keys\") pod \"keystone-bootstrap-dqbqb\" (UID: \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\") " pod="openstack/keystone-bootstrap-dqbqb"
Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.041735 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-ovsdbserver-nb\") pod \"dnsmasq-dns-f877ddd87-l6gbw\" (UID: \"1b2568dd-1397-4d8a-90af-a5b8e81f58a3\") " pod="openstack/dnsmasq-dns-f877ddd87-l6gbw"
Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.041828 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-dns-svc\") pod \"dnsmasq-dns-f877ddd87-l6gbw\" (UID: \"1b2568dd-1397-4d8a-90af-a5b8e81f58a3\") " pod="openstack/dnsmasq-dns-f877ddd87-l6gbw"
Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.041871 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28zwg\" (UniqueName: \"kubernetes.io/projected/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-kube-api-access-28zwg\") pod \"dnsmasq-dns-f877ddd87-l6gbw\" (UID: \"1b2568dd-1397-4d8a-90af-a5b8e81f58a3\") " pod="openstack/dnsmasq-dns-f877ddd87-l6gbw"
Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.041906 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-config\") pod \"dnsmasq-dns-f877ddd87-l6gbw\" (UID: \"1b2568dd-1397-4d8a-90af-a5b8e81f58a3\") " pod="openstack/dnsmasq-dns-f877ddd87-l6gbw"
Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.143682 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-dns-svc\") pod \"dnsmasq-dns-f877ddd87-l6gbw\" (UID: \"1b2568dd-1397-4d8a-90af-a5b8e81f58a3\") " pod="openstack/dnsmasq-dns-f877ddd87-l6gbw"
Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.143744 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28zwg\" (UniqueName: \"kubernetes.io/projected/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-kube-api-access-28zwg\") pod \"dnsmasq-dns-f877ddd87-l6gbw\" (UID: \"1b2568dd-1397-4d8a-90af-a5b8e81f58a3\") " pod="openstack/dnsmasq-dns-f877ddd87-l6gbw"
Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.143789 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-config\") pod \"dnsmasq-dns-f877ddd87-l6gbw\" (UID: \"1b2568dd-1397-4d8a-90af-a5b8e81f58a3\") " pod="openstack/dnsmasq-dns-f877ddd87-l6gbw"
Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.143894 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-credential-keys\") pod \"keystone-bootstrap-dqbqb\" (UID: \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\") " pod="openstack/keystone-bootstrap-dqbqb"
Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.143932 4665 reconciler_common.go:218] "operationExecutor.MountVolume
started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-scripts\") pod \"keystone-bootstrap-dqbqb\" (UID: \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\") " pod="openstack/keystone-bootstrap-dqbqb" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.143955 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnjhd\" (UniqueName: \"kubernetes.io/projected/2c7e82db-d9a2-4e59-8613-e58e9abfce55-kube-api-access-jnjhd\") pod \"keystone-bootstrap-dqbqb\" (UID: \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\") " pod="openstack/keystone-bootstrap-dqbqb" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.143983 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-ovsdbserver-sb\") pod \"dnsmasq-dns-f877ddd87-l6gbw\" (UID: \"1b2568dd-1397-4d8a-90af-a5b8e81f58a3\") " pod="openstack/dnsmasq-dns-f877ddd87-l6gbw" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.144045 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-combined-ca-bundle\") pod \"keystone-bootstrap-dqbqb\" (UID: \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\") " pod="openstack/keystone-bootstrap-dqbqb" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.144080 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-config-data\") pod \"keystone-bootstrap-dqbqb\" (UID: \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\") " pod="openstack/keystone-bootstrap-dqbqb" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.144107 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-fernet-keys\") pod \"keystone-bootstrap-dqbqb\" (UID: \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\") " pod="openstack/keystone-bootstrap-dqbqb" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.144137 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-ovsdbserver-nb\") pod \"dnsmasq-dns-f877ddd87-l6gbw\" (UID: \"1b2568dd-1397-4d8a-90af-a5b8e81f58a3\") " pod="openstack/dnsmasq-dns-f877ddd87-l6gbw" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.144951 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-dns-svc\") pod \"dnsmasq-dns-f877ddd87-l6gbw\" (UID: \"1b2568dd-1397-4d8a-90af-a5b8e81f58a3\") " pod="openstack/dnsmasq-dns-f877ddd87-l6gbw" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.145208 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-ovsdbserver-nb\") pod \"dnsmasq-dns-f877ddd87-l6gbw\" (UID: \"1b2568dd-1397-4d8a-90af-a5b8e81f58a3\") " pod="openstack/dnsmasq-dns-f877ddd87-l6gbw" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.145209 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-config\") pod \"dnsmasq-dns-f877ddd87-l6gbw\" 
(UID: \"1b2568dd-1397-4d8a-90af-a5b8e81f58a3\") " pod="openstack/dnsmasq-dns-f877ddd87-l6gbw" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.146015 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-ovsdbserver-sb\") pod \"dnsmasq-dns-f877ddd87-l6gbw\" (UID: \"1b2568dd-1397-4d8a-90af-a5b8e81f58a3\") " pod="openstack/dnsmasq-dns-f877ddd87-l6gbw" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.152911 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-fernet-keys\") pod \"keystone-bootstrap-dqbqb\" (UID: \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\") " pod="openstack/keystone-bootstrap-dqbqb" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.153391 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-config-data\") pod \"keystone-bootstrap-dqbqb\" (UID: \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\") " pod="openstack/keystone-bootstrap-dqbqb" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.153779 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-scripts\") pod \"keystone-bootstrap-dqbqb\" (UID: \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\") " pod="openstack/keystone-bootstrap-dqbqb" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.165866 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-credential-keys\") pod \"keystone-bootstrap-dqbqb\" (UID: \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\") " pod="openstack/keystone-bootstrap-dqbqb" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.172259 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-combined-ca-bundle\") pod \"keystone-bootstrap-dqbqb\" (UID: \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\") " pod="openstack/keystone-bootstrap-dqbqb" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.212754 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-l6gbw"] Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.213786 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jnjhd\" (UniqueName: \"kubernetes.io/projected/2c7e82db-d9a2-4e59-8613-e58e9abfce55-kube-api-access-jnjhd\") pod \"keystone-bootstrap-dqbqb\" (UID: \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\") " pod="openstack/keystone-bootstrap-dqbqb" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.219247 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28zwg\" (UniqueName: \"kubernetes.io/projected/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-kube-api-access-28zwg\") pod \"dnsmasq-dns-f877ddd87-l6gbw\" (UID: \"1b2568dd-1397-4d8a-90af-a5b8e81f58a3\") " pod="openstack/dnsmasq-dns-f877ddd87-l6gbw" Dec 05 01:31:38 crc kubenswrapper[4665]: E1205 01:31:38.227194 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-28zwg], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/dnsmasq-dns-f877ddd87-l6gbw" 
podUID="1b2568dd-1397-4d8a-90af-a5b8e81f58a3" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.298966 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-dqbqb" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.310702 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-784f69c749-swzpt"] Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.312071 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-784f69c749-swzpt" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.452633 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f47233c0-8438-464e-852c-6cbf9ff63a59-ovsdbserver-sb\") pod \"dnsmasq-dns-784f69c749-swzpt\" (UID: \"f47233c0-8438-464e-852c-6cbf9ff63a59\") " pod="openstack/dnsmasq-dns-784f69c749-swzpt" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.452703 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f47233c0-8438-464e-852c-6cbf9ff63a59-dns-svc\") pod \"dnsmasq-dns-784f69c749-swzpt\" (UID: \"f47233c0-8438-464e-852c-6cbf9ff63a59\") " pod="openstack/dnsmasq-dns-784f69c749-swzpt" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.452734 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kbm4m\" (UniqueName: \"kubernetes.io/projected/f47233c0-8438-464e-852c-6cbf9ff63a59-kube-api-access-kbm4m\") pod \"dnsmasq-dns-784f69c749-swzpt\" (UID: \"f47233c0-8438-464e-852c-6cbf9ff63a59\") " pod="openstack/dnsmasq-dns-784f69c749-swzpt" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.452760 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f47233c0-8438-464e-852c-6cbf9ff63a59-ovsdbserver-nb\") pod \"dnsmasq-dns-784f69c749-swzpt\" (UID: \"f47233c0-8438-464e-852c-6cbf9ff63a59\") " pod="openstack/dnsmasq-dns-784f69c749-swzpt" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.452799 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f47233c0-8438-464e-852c-6cbf9ff63a59-config\") pod \"dnsmasq-dns-784f69c749-swzpt\" (UID: \"f47233c0-8438-464e-852c-6cbf9ff63a59\") " pod="openstack/dnsmasq-dns-784f69c749-swzpt" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.472759 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-777959986f-xdlws"] Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.474068 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-777959986f-xdlws" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.481758 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-rkvs6"] Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.482836 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-rkvs6" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.483665 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-h8cmw" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.483851 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.497500 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.500427 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.501391 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-9kmh2" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.501535 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.508420 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-784f69c749-swzpt"] Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.516043 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.540368 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-777959986f-xdlws"] Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.553982 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2a58335-982b-42ff-933c-f93d38fbb197-scripts\") pod \"cinder-db-sync-rkvs6\" (UID: \"f2a58335-982b-42ff-933c-f93d38fbb197\") " pod="openstack/cinder-db-sync-rkvs6" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.554044 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f47233c0-8438-464e-852c-6cbf9ff63a59-config\") pod \"dnsmasq-dns-784f69c749-swzpt\" (UID: \"f47233c0-8438-464e-852c-6cbf9ff63a59\") " pod="openstack/dnsmasq-dns-784f69c749-swzpt" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.554087 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2f28d1d6-99d2-4793-a5da-305accdaad6f-config-data\") pod \"horizon-777959986f-xdlws\" (UID: \"2f28d1d6-99d2-4793-a5da-305accdaad6f\") " pod="openstack/horizon-777959986f-xdlws" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.554120 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dsmfm\" (UniqueName: \"kubernetes.io/projected/2f28d1d6-99d2-4793-a5da-305accdaad6f-kube-api-access-dsmfm\") pod \"horizon-777959986f-xdlws\" (UID: \"2f28d1d6-99d2-4793-a5da-305accdaad6f\") " pod="openstack/horizon-777959986f-xdlws" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.554142 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2f28d1d6-99d2-4793-a5da-305accdaad6f-scripts\") pod \"horizon-777959986f-xdlws\" (UID: \"2f28d1d6-99d2-4793-a5da-305accdaad6f\") " pod="openstack/horizon-777959986f-xdlws" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.554163 4665 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2a58335-982b-42ff-933c-f93d38fbb197-config-data\") pod \"cinder-db-sync-rkvs6\" (UID: \"f2a58335-982b-42ff-933c-f93d38fbb197\") " pod="openstack/cinder-db-sync-rkvs6" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.554179 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f2a58335-982b-42ff-933c-f93d38fbb197-etc-machine-id\") pod \"cinder-db-sync-rkvs6\" (UID: \"f2a58335-982b-42ff-933c-f93d38fbb197\") " pod="openstack/cinder-db-sync-rkvs6" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.554202 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dm6fs\" (UniqueName: \"kubernetes.io/projected/f2a58335-982b-42ff-933c-f93d38fbb197-kube-api-access-dm6fs\") pod \"cinder-db-sync-rkvs6\" (UID: \"f2a58335-982b-42ff-933c-f93d38fbb197\") " pod="openstack/cinder-db-sync-rkvs6" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.554220 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2f28d1d6-99d2-4793-a5da-305accdaad6f-horizon-secret-key\") pod \"horizon-777959986f-xdlws\" (UID: \"2f28d1d6-99d2-4793-a5da-305accdaad6f\") " pod="openstack/horizon-777959986f-xdlws" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.554244 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f47233c0-8438-464e-852c-6cbf9ff63a59-ovsdbserver-sb\") pod \"dnsmasq-dns-784f69c749-swzpt\" (UID: \"f47233c0-8438-464e-852c-6cbf9ff63a59\") " pod="openstack/dnsmasq-dns-784f69c749-swzpt" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.554265 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f47233c0-8438-464e-852c-6cbf9ff63a59-dns-svc\") pod \"dnsmasq-dns-784f69c749-swzpt\" (UID: \"f47233c0-8438-464e-852c-6cbf9ff63a59\") " pod="openstack/dnsmasq-dns-784f69c749-swzpt" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.554280 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2a58335-982b-42ff-933c-f93d38fbb197-combined-ca-bundle\") pod \"cinder-db-sync-rkvs6\" (UID: \"f2a58335-982b-42ff-933c-f93d38fbb197\") " pod="openstack/cinder-db-sync-rkvs6" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.554319 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f2a58335-982b-42ff-933c-f93d38fbb197-db-sync-config-data\") pod \"cinder-db-sync-rkvs6\" (UID: \"f2a58335-982b-42ff-933c-f93d38fbb197\") " pod="openstack/cinder-db-sync-rkvs6" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.554338 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kbm4m\" (UniqueName: \"kubernetes.io/projected/f47233c0-8438-464e-852c-6cbf9ff63a59-kube-api-access-kbm4m\") pod \"dnsmasq-dns-784f69c749-swzpt\" (UID: \"f47233c0-8438-464e-852c-6cbf9ff63a59\") " pod="openstack/dnsmasq-dns-784f69c749-swzpt" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.554355 4665 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f28d1d6-99d2-4793-a5da-305accdaad6f-logs\") pod \"horizon-777959986f-xdlws\" (UID: \"2f28d1d6-99d2-4793-a5da-305accdaad6f\") " pod="openstack/horizon-777959986f-xdlws" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.554391 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f47233c0-8438-464e-852c-6cbf9ff63a59-ovsdbserver-nb\") pod \"dnsmasq-dns-784f69c749-swzpt\" (UID: \"f47233c0-8438-464e-852c-6cbf9ff63a59\") " pod="openstack/dnsmasq-dns-784f69c749-swzpt" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.555192 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f47233c0-8438-464e-852c-6cbf9ff63a59-ovsdbserver-nb\") pod \"dnsmasq-dns-784f69c749-swzpt\" (UID: \"f47233c0-8438-464e-852c-6cbf9ff63a59\") " pod="openstack/dnsmasq-dns-784f69c749-swzpt" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.555946 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f47233c0-8438-464e-852c-6cbf9ff63a59-ovsdbserver-sb\") pod \"dnsmasq-dns-784f69c749-swzpt\" (UID: \"f47233c0-8438-464e-852c-6cbf9ff63a59\") " pod="openstack/dnsmasq-dns-784f69c749-swzpt" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.556369 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f47233c0-8438-464e-852c-6cbf9ff63a59-dns-svc\") pod \"dnsmasq-dns-784f69c749-swzpt\" (UID: \"f47233c0-8438-464e-852c-6cbf9ff63a59\") " pod="openstack/dnsmasq-dns-784f69c749-swzpt" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.565561 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f47233c0-8438-464e-852c-6cbf9ff63a59-config\") pod \"dnsmasq-dns-784f69c749-swzpt\" (UID: \"f47233c0-8438-464e-852c-6cbf9ff63a59\") " pod="openstack/dnsmasq-dns-784f69c749-swzpt" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.569959 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-rkvs6"] Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.619130 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-v6sj2"] Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.630176 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-v6sj2" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.637090 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.637925 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.641005 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-h4dhb" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.654473 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kbm4m\" (UniqueName: \"kubernetes.io/projected/f47233c0-8438-464e-852c-6cbf9ff63a59-kube-api-access-kbm4m\") pod \"dnsmasq-dns-784f69c749-swzpt\" (UID: \"f47233c0-8438-464e-852c-6cbf9ff63a59\") " pod="openstack/dnsmasq-dns-784f69c749-swzpt" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.655821 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dsmfm\" (UniqueName: \"kubernetes.io/projected/2f28d1d6-99d2-4793-a5da-305accdaad6f-kube-api-access-dsmfm\") pod \"horizon-777959986f-xdlws\" (UID: \"2f28d1d6-99d2-4793-a5da-305accdaad6f\") " pod="openstack/horizon-777959986f-xdlws" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.655861 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2f28d1d6-99d2-4793-a5da-305accdaad6f-scripts\") pod \"horizon-777959986f-xdlws\" (UID: \"2f28d1d6-99d2-4793-a5da-305accdaad6f\") " pod="openstack/horizon-777959986f-xdlws" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.655888 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2a58335-982b-42ff-933c-f93d38fbb197-config-data\") pod \"cinder-db-sync-rkvs6\" (UID: \"f2a58335-982b-42ff-933c-f93d38fbb197\") " pod="openstack/cinder-db-sync-rkvs6" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.655911 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f2a58335-982b-42ff-933c-f93d38fbb197-etc-machine-id\") pod \"cinder-db-sync-rkvs6\" (UID: \"f2a58335-982b-42ff-933c-f93d38fbb197\") " pod="openstack/cinder-db-sync-rkvs6" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.655943 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dm6fs\" (UniqueName: \"kubernetes.io/projected/f2a58335-982b-42ff-933c-f93d38fbb197-kube-api-access-dm6fs\") pod \"cinder-db-sync-rkvs6\" (UID: \"f2a58335-982b-42ff-933c-f93d38fbb197\") " pod="openstack/cinder-db-sync-rkvs6" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.655969 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2f28d1d6-99d2-4793-a5da-305accdaad6f-horizon-secret-key\") pod \"horizon-777959986f-xdlws\" (UID: \"2f28d1d6-99d2-4793-a5da-305accdaad6f\") " pod="openstack/horizon-777959986f-xdlws" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.656002 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2a58335-982b-42ff-933c-f93d38fbb197-combined-ca-bundle\") pod \"cinder-db-sync-rkvs6\" (UID: 
\"f2a58335-982b-42ff-933c-f93d38fbb197\") " pod="openstack/cinder-db-sync-rkvs6" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.656031 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f2a58335-982b-42ff-933c-f93d38fbb197-db-sync-config-data\") pod \"cinder-db-sync-rkvs6\" (UID: \"f2a58335-982b-42ff-933c-f93d38fbb197\") " pod="openstack/cinder-db-sync-rkvs6" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.656050 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f28d1d6-99d2-4793-a5da-305accdaad6f-logs\") pod \"horizon-777959986f-xdlws\" (UID: \"2f28d1d6-99d2-4793-a5da-305accdaad6f\") " pod="openstack/horizon-777959986f-xdlws" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.656088 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2a58335-982b-42ff-933c-f93d38fbb197-scripts\") pod \"cinder-db-sync-rkvs6\" (UID: \"f2a58335-982b-42ff-933c-f93d38fbb197\") " pod="openstack/cinder-db-sync-rkvs6" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.656139 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2f28d1d6-99d2-4793-a5da-305accdaad6f-config-data\") pod \"horizon-777959986f-xdlws\" (UID: \"2f28d1d6-99d2-4793-a5da-305accdaad6f\") " pod="openstack/horizon-777959986f-xdlws" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.657157 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2f28d1d6-99d2-4793-a5da-305accdaad6f-scripts\") pod \"horizon-777959986f-xdlws\" (UID: \"2f28d1d6-99d2-4793-a5da-305accdaad6f\") " pod="openstack/horizon-777959986f-xdlws" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.657282 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2f28d1d6-99d2-4793-a5da-305accdaad6f-config-data\") pod \"horizon-777959986f-xdlws\" (UID: \"2f28d1d6-99d2-4793-a5da-305accdaad6f\") " pod="openstack/horizon-777959986f-xdlws" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.657385 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f2a58335-982b-42ff-933c-f93d38fbb197-etc-machine-id\") pod \"cinder-db-sync-rkvs6\" (UID: \"f2a58335-982b-42ff-933c-f93d38fbb197\") " pod="openstack/cinder-db-sync-rkvs6" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.657612 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f28d1d6-99d2-4793-a5da-305accdaad6f-logs\") pod \"horizon-777959986f-xdlws\" (UID: \"2f28d1d6-99d2-4793-a5da-305accdaad6f\") " pod="openstack/horizon-777959986f-xdlws" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.687767 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2a58335-982b-42ff-933c-f93d38fbb197-config-data\") pod \"cinder-db-sync-rkvs6\" (UID: \"f2a58335-982b-42ff-933c-f93d38fbb197\") " pod="openstack/cinder-db-sync-rkvs6" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.690980 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: 
\"kubernetes.io/secret/f2a58335-982b-42ff-933c-f93d38fbb197-db-sync-config-data\") pod \"cinder-db-sync-rkvs6\" (UID: \"f2a58335-982b-42ff-933c-f93d38fbb197\") " pod="openstack/cinder-db-sync-rkvs6" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.695400 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2a58335-982b-42ff-933c-f93d38fbb197-combined-ca-bundle\") pod \"cinder-db-sync-rkvs6\" (UID: \"f2a58335-982b-42ff-933c-f93d38fbb197\") " pod="openstack/cinder-db-sync-rkvs6" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.698952 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2a58335-982b-42ff-933c-f93d38fbb197-scripts\") pod \"cinder-db-sync-rkvs6\" (UID: \"f2a58335-982b-42ff-933c-f93d38fbb197\") " pod="openstack/cinder-db-sync-rkvs6" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.699141 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2f28d1d6-99d2-4793-a5da-305accdaad6f-horizon-secret-key\") pod \"horizon-777959986f-xdlws\" (UID: \"2f28d1d6-99d2-4793-a5da-305accdaad6f\") " pod="openstack/horizon-777959986f-xdlws" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.714700 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dsmfm\" (UniqueName: \"kubernetes.io/projected/2f28d1d6-99d2-4793-a5da-305accdaad6f-kube-api-access-dsmfm\") pod \"horizon-777959986f-xdlws\" (UID: \"2f28d1d6-99d2-4793-a5da-305accdaad6f\") " pod="openstack/horizon-777959986f-xdlws" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.746202 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-v6sj2"] Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.757239 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2x2lm\" (UniqueName: \"kubernetes.io/projected/a032ea63-dc16-4378-b848-9a4f1274f860-kube-api-access-2x2lm\") pod \"neutron-db-sync-v6sj2\" (UID: \"a032ea63-dc16-4378-b848-9a4f1274f860\") " pod="openstack/neutron-db-sync-v6sj2" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.757425 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a032ea63-dc16-4378-b848-9a4f1274f860-config\") pod \"neutron-db-sync-v6sj2\" (UID: \"a032ea63-dc16-4378-b848-9a4f1274f860\") " pod="openstack/neutron-db-sync-v6sj2" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.757507 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a032ea63-dc16-4378-b848-9a4f1274f860-combined-ca-bundle\") pod \"neutron-db-sync-v6sj2\" (UID: \"a032ea63-dc16-4378-b848-9a4f1274f860\") " pod="openstack/neutron-db-sync-v6sj2" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.771852 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-267l7"] Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.772898 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-267l7" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.780851 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dm6fs\" (UniqueName: \"kubernetes.io/projected/f2a58335-982b-42ff-933c-f93d38fbb197-kube-api-access-dm6fs\") pod \"cinder-db-sync-rkvs6\" (UID: \"f2a58335-982b-42ff-933c-f93d38fbb197\") " pod="openstack/cinder-db-sync-rkvs6" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.786830 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.787527 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-44zp6" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.799514 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-267l7"] Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.800242 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f877ddd87-l6gbw" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.801070 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f","Type":"ContainerStarted","Data":"86ddd383dc8226f15a2e49467afc153aaa78c3a93f7395c808be03ec9448281c"} Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.801100 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f","Type":"ContainerStarted","Data":"36ca8089a76ee4412532e6a539f4c015b3d3e4500df4695140937f6a11cf501d"} Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.806174 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-777959986f-xdlws" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.846739 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-rkvs6" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.862334 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a032ea63-dc16-4378-b848-9a4f1274f860-combined-ca-bundle\") pod \"neutron-db-sync-v6sj2\" (UID: \"a032ea63-dc16-4378-b848-9a4f1274f860\") " pod="openstack/neutron-db-sync-v6sj2" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.862898 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2x2lm\" (UniqueName: \"kubernetes.io/projected/a032ea63-dc16-4378-b848-9a4f1274f860-kube-api-access-2x2lm\") pod \"neutron-db-sync-v6sj2\" (UID: \"a032ea63-dc16-4378-b848-9a4f1274f860\") " pod="openstack/neutron-db-sync-v6sj2" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.862981 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a032ea63-dc16-4378-b848-9a4f1274f860-config\") pod \"neutron-db-sync-v6sj2\" (UID: \"a032ea63-dc16-4378-b848-9a4f1274f860\") " pod="openstack/neutron-db-sync-v6sj2" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.880929 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/a032ea63-dc16-4378-b848-9a4f1274f860-config\") pod \"neutron-db-sync-v6sj2\" (UID: \"a032ea63-dc16-4378-b848-9a4f1274f860\") " pod="openstack/neutron-db-sync-v6sj2" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.889823 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a032ea63-dc16-4378-b848-9a4f1274f860-combined-ca-bundle\") pod \"neutron-db-sync-v6sj2\" (UID: \"a032ea63-dc16-4378-b848-9a4f1274f860\") " pod="openstack/neutron-db-sync-v6sj2" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.889884 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-zhgm4"] Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.890917 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-zhgm4" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.931576 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f877ddd87-l6gbw" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.931785 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-v4657" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.931806 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.931973 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.955939 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-784f69c749-swzpt" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.983022 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2x2lm\" (UniqueName: \"kubernetes.io/projected/a032ea63-dc16-4378-b848-9a4f1274f860-kube-api-access-2x2lm\") pod \"neutron-db-sync-v6sj2\" (UID: \"a032ea63-dc16-4378-b848-9a4f1274f860\") " pod="openstack/neutron-db-sync-v6sj2" Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.984798 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-784f69c749-swzpt"] Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.984836 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-zhgm4"] Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.984851 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.986791 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-647f7cc89f-n8zf8"] Dec 05 01:31:38 crc kubenswrapper[4665]: I1205 01:31:38.991917 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.006446 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.006807 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.007376 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-647f7cc89f-n8zf8" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.065959 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-ovsdbserver-nb\") pod \"1b2568dd-1397-4d8a-90af-a5b8e81f58a3\" (UID: \"1b2568dd-1397-4d8a-90af-a5b8e81f58a3\") " Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.066023 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-dns-svc\") pod \"1b2568dd-1397-4d8a-90af-a5b8e81f58a3\" (UID: \"1b2568dd-1397-4d8a-90af-a5b8e81f58a3\") " Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.066075 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-config\") pod \"1b2568dd-1397-4d8a-90af-a5b8e81f58a3\" (UID: \"1b2568dd-1397-4d8a-90af-a5b8e81f58a3\") " Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.067691 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-28zwg\" (UniqueName: \"kubernetes.io/projected/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-kube-api-access-28zwg\") pod \"1b2568dd-1397-4d8a-90af-a5b8e81f58a3\" (UID: \"1b2568dd-1397-4d8a-90af-a5b8e81f58a3\") " Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.067742 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-ovsdbserver-sb\") pod \"1b2568dd-1397-4d8a-90af-a5b8e81f58a3\" (UID: \"1b2568dd-1397-4d8a-90af-a5b8e81f58a3\") " Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 
01:31:39.067956 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72b6fbf7-1bb1-45c3-97a1-61da90bd1a92-combined-ca-bundle\") pod \"barbican-db-sync-267l7\" (UID: \"72b6fbf7-1bb1-45c3-97a1-61da90bd1a92\") " pod="openstack/barbican-db-sync-267l7" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.067998 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnwsx\" (UniqueName: \"kubernetes.io/projected/72b6fbf7-1bb1-45c3-97a1-61da90bd1a92-kube-api-access-vnwsx\") pod \"barbican-db-sync-267l7\" (UID: \"72b6fbf7-1bb1-45c3-97a1-61da90bd1a92\") " pod="openstack/barbican-db-sync-267l7" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.068099 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/72b6fbf7-1bb1-45c3-97a1-61da90bd1a92-db-sync-config-data\") pod \"barbican-db-sync-267l7\" (UID: \"72b6fbf7-1bb1-45c3-97a1-61da90bd1a92\") " pod="openstack/barbican-db-sync-267l7" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.068582 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "1b2568dd-1397-4d8a-90af-a5b8e81f58a3" (UID: "1b2568dd-1397-4d8a-90af-a5b8e81f58a3"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.068887 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "1b2568dd-1397-4d8a-90af-a5b8e81f58a3" (UID: "1b2568dd-1397-4d8a-90af-a5b8e81f58a3"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.069150 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.069183 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-config" (OuterVolumeSpecName: "config") pod "1b2568dd-1397-4d8a-90af-a5b8e81f58a3" (UID: "1b2568dd-1397-4d8a-90af-a5b8e81f58a3"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.070384 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "1b2568dd-1397-4d8a-90af-a5b8e81f58a3" (UID: "1b2568dd-1397-4d8a-90af-a5b8e81f58a3"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.082695 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-kube-api-access-28zwg" (OuterVolumeSpecName: "kube-api-access-28zwg") pod "1b2568dd-1397-4d8a-90af-a5b8e81f58a3" (UID: "1b2568dd-1397-4d8a-90af-a5b8e81f58a3"). InnerVolumeSpecName "kube-api-access-28zwg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.093963 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-647f7cc89f-n8zf8"] Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.111655 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-f84976bdf-jk9zf"] Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.113050 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f84976bdf-jk9zf" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.122927 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-v6sj2" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181144 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8tcgq\" (UniqueName: \"kubernetes.io/projected/6b995216-a8c4-418c-9a82-eff79ca5360c-kube-api-access-8tcgq\") pod \"ceilometer-0\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " pod="openstack/ceilometer-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181211 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b995216-a8c4-418c-9a82-eff79ca5360c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " pod="openstack/ceilometer-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181235 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b995216-a8c4-418c-9a82-eff79ca5360c-log-httpd\") pod \"ceilometer-0\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " pod="openstack/ceilometer-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181267 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/72b6fbf7-1bb1-45c3-97a1-61da90bd1a92-db-sync-config-data\") pod \"barbican-db-sync-267l7\" (UID: \"72b6fbf7-1bb1-45c3-97a1-61da90bd1a92\") " pod="openstack/barbican-db-sync-267l7" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181313 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe3b0954-8978-4517-90b5-40cefa8d36a7-config\") pod \"dnsmasq-dns-f84976bdf-jk9zf\" (UID: \"fe3b0954-8978-4517-90b5-40cefa8d36a7\") " pod="openstack/dnsmasq-dns-f84976bdf-jk9zf" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181334 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de47bb8c-ea83-4f9a-be28-5716b59d25ed-combined-ca-bundle\") pod \"placement-db-sync-zhgm4\" (UID: \"de47bb8c-ea83-4f9a-be28-5716b59d25ed\") " pod="openstack/placement-db-sync-zhgm4" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181365 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-scripts\") pod \"horizon-647f7cc89f-n8zf8\" (UID: \"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b\") " pod="openstack/horizon-647f7cc89f-n8zf8" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181390 4665 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bghqz\" (UniqueName: \"kubernetes.io/projected/de47bb8c-ea83-4f9a-be28-5716b59d25ed-kube-api-access-bghqz\") pod \"placement-db-sync-zhgm4\" (UID: \"de47bb8c-ea83-4f9a-be28-5716b59d25ed\") " pod="openstack/placement-db-sync-zhgm4" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181427 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-logs\") pod \"horizon-647f7cc89f-n8zf8\" (UID: \"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b\") " pod="openstack/horizon-647f7cc89f-n8zf8" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181447 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b995216-a8c4-418c-9a82-eff79ca5360c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " pod="openstack/ceilometer-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181492 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fe3b0954-8978-4517-90b5-40cefa8d36a7-ovsdbserver-sb\") pod \"dnsmasq-dns-f84976bdf-jk9zf\" (UID: \"fe3b0954-8978-4517-90b5-40cefa8d36a7\") " pod="openstack/dnsmasq-dns-f84976bdf-jk9zf" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181507 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gtn7m\" (UniqueName: \"kubernetes.io/projected/fe3b0954-8978-4517-90b5-40cefa8d36a7-kube-api-access-gtn7m\") pod \"dnsmasq-dns-f84976bdf-jk9zf\" (UID: \"fe3b0954-8978-4517-90b5-40cefa8d36a7\") " pod="openstack/dnsmasq-dns-f84976bdf-jk9zf" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181525 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nzxrv\" (UniqueName: \"kubernetes.io/projected/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-kube-api-access-nzxrv\") pod \"horizon-647f7cc89f-n8zf8\" (UID: \"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b\") " pod="openstack/horizon-647f7cc89f-n8zf8" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181541 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b995216-a8c4-418c-9a82-eff79ca5360c-config-data\") pod \"ceilometer-0\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " pod="openstack/ceilometer-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181559 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de47bb8c-ea83-4f9a-be28-5716b59d25ed-logs\") pod \"placement-db-sync-zhgm4\" (UID: \"de47bb8c-ea83-4f9a-be28-5716b59d25ed\") " pod="openstack/placement-db-sync-zhgm4" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181581 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fe3b0954-8978-4517-90b5-40cefa8d36a7-dns-svc\") pod \"dnsmasq-dns-f84976bdf-jk9zf\" (UID: \"fe3b0954-8978-4517-90b5-40cefa8d36a7\") " pod="openstack/dnsmasq-dns-f84976bdf-jk9zf" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181598 4665 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72b6fbf7-1bb1-45c3-97a1-61da90bd1a92-combined-ca-bundle\") pod \"barbican-db-sync-267l7\" (UID: \"72b6fbf7-1bb1-45c3-97a1-61da90bd1a92\") " pod="openstack/barbican-db-sync-267l7" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181618 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b995216-a8c4-418c-9a82-eff79ca5360c-scripts\") pod \"ceilometer-0\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " pod="openstack/ceilometer-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181633 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de47bb8c-ea83-4f9a-be28-5716b59d25ed-scripts\") pod \"placement-db-sync-zhgm4\" (UID: \"de47bb8c-ea83-4f9a-be28-5716b59d25ed\") " pod="openstack/placement-db-sync-zhgm4" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181649 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-horizon-secret-key\") pod \"horizon-647f7cc89f-n8zf8\" (UID: \"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b\") " pod="openstack/horizon-647f7cc89f-n8zf8" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181664 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b995216-a8c4-418c-9a82-eff79ca5360c-run-httpd\") pod \"ceilometer-0\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " pod="openstack/ceilometer-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181681 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnwsx\" (UniqueName: \"kubernetes.io/projected/72b6fbf7-1bb1-45c3-97a1-61da90bd1a92-kube-api-access-vnwsx\") pod \"barbican-db-sync-267l7\" (UID: \"72b6fbf7-1bb1-45c3-97a1-61da90bd1a92\") " pod="openstack/barbican-db-sync-267l7" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181706 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fe3b0954-8978-4517-90b5-40cefa8d36a7-ovsdbserver-nb\") pod \"dnsmasq-dns-f84976bdf-jk9zf\" (UID: \"fe3b0954-8978-4517-90b5-40cefa8d36a7\") " pod="openstack/dnsmasq-dns-f84976bdf-jk9zf" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181735 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-config-data\") pod \"horizon-647f7cc89f-n8zf8\" (UID: \"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b\") " pod="openstack/horizon-647f7cc89f-n8zf8" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181755 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de47bb8c-ea83-4f9a-be28-5716b59d25ed-config-data\") pod \"placement-db-sync-zhgm4\" (UID: \"de47bb8c-ea83-4f9a-be28-5716b59d25ed\") " pod="openstack/placement-db-sync-zhgm4" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181797 4665 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181807 4665 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181817 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181829 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-28zwg\" (UniqueName: \"kubernetes.io/projected/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-kube-api-access-28zwg\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.181838 4665 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1b2568dd-1397-4d8a-90af-a5b8e81f58a3-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.182360 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-f84976bdf-jk9zf"] Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.189276 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72b6fbf7-1bb1-45c3-97a1-61da90bd1a92-combined-ca-bundle\") pod \"barbican-db-sync-267l7\" (UID: \"72b6fbf7-1bb1-45c3-97a1-61da90bd1a92\") " pod="openstack/barbican-db-sync-267l7" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.199809 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/72b6fbf7-1bb1-45c3-97a1-61da90bd1a92-db-sync-config-data\") pod \"barbican-db-sync-267l7\" (UID: \"72b6fbf7-1bb1-45c3-97a1-61da90bd1a92\") " pod="openstack/barbican-db-sync-267l7" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.226632 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.229687 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.248391 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=38.560229587 podStartE2EDuration="45.248370362s" podCreationTimestamp="2025-12-05 01:30:54 +0000 UTC" firstStartedPulling="2025-12-05 01:31:29.658227571 +0000 UTC m=+1264.997619870" lastFinishedPulling="2025-12-05 01:31:36.346368346 +0000 UTC m=+1271.685760645" observedRunningTime="2025-12-05 01:31:39.188014954 +0000 UTC m=+1274.527407253" watchObservedRunningTime="2025-12-05 01:31:39.248370362 +0000 UTC m=+1274.587762651" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.252101 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-wn6mh" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.268803 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.269702 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.274780 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.283565 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b995216-a8c4-418c-9a82-eff79ca5360c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " pod="openstack/ceilometer-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.283619 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b995216-a8c4-418c-9a82-eff79ca5360c-log-httpd\") pod \"ceilometer-0\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " pod="openstack/ceilometer-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.283658 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe3b0954-8978-4517-90b5-40cefa8d36a7-config\") pod \"dnsmasq-dns-f84976bdf-jk9zf\" (UID: \"fe3b0954-8978-4517-90b5-40cefa8d36a7\") " pod="openstack/dnsmasq-dns-f84976bdf-jk9zf" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.283676 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de47bb8c-ea83-4f9a-be28-5716b59d25ed-combined-ca-bundle\") pod \"placement-db-sync-zhgm4\" (UID: \"de47bb8c-ea83-4f9a-be28-5716b59d25ed\") " pod="openstack/placement-db-sync-zhgm4" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.283695 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-scripts\") pod \"horizon-647f7cc89f-n8zf8\" (UID: \"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b\") " pod="openstack/horizon-647f7cc89f-n8zf8" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.283713 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bghqz\" (UniqueName: \"kubernetes.io/projected/de47bb8c-ea83-4f9a-be28-5716b59d25ed-kube-api-access-bghqz\") pod \"placement-db-sync-zhgm4\" (UID: \"de47bb8c-ea83-4f9a-be28-5716b59d25ed\") " 
pod="openstack/placement-db-sync-zhgm4" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.283734 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9c26ce5-defc-45b8-8c73-91ea71cd9554-scripts\") pod \"glance-default-external-api-0\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.283755 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-logs\") pod \"horizon-647f7cc89f-n8zf8\" (UID: \"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b\") " pod="openstack/horizon-647f7cc89f-n8zf8" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.283774 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b995216-a8c4-418c-9a82-eff79ca5360c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " pod="openstack/ceilometer-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.283800 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9c26ce5-defc-45b8-8c73-91ea71cd9554-config-data\") pod \"glance-default-external-api-0\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.283829 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fe3b0954-8978-4517-90b5-40cefa8d36a7-ovsdbserver-sb\") pod \"dnsmasq-dns-f84976bdf-jk9zf\" (UID: \"fe3b0954-8978-4517-90b5-40cefa8d36a7\") " pod="openstack/dnsmasq-dns-f84976bdf-jk9zf" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.283844 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gtn7m\" (UniqueName: \"kubernetes.io/projected/fe3b0954-8978-4517-90b5-40cefa8d36a7-kube-api-access-gtn7m\") pod \"dnsmasq-dns-f84976bdf-jk9zf\" (UID: \"fe3b0954-8978-4517-90b5-40cefa8d36a7\") " pod="openstack/dnsmasq-dns-f84976bdf-jk9zf" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.283861 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nzxrv\" (UniqueName: \"kubernetes.io/projected/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-kube-api-access-nzxrv\") pod \"horizon-647f7cc89f-n8zf8\" (UID: \"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b\") " pod="openstack/horizon-647f7cc89f-n8zf8" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.283878 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b995216-a8c4-418c-9a82-eff79ca5360c-config-data\") pod \"ceilometer-0\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " pod="openstack/ceilometer-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.283908 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9c26ce5-defc-45b8-8c73-91ea71cd9554-logs\") pod \"glance-default-external-api-0\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.283924 4665 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de47bb8c-ea83-4f9a-be28-5716b59d25ed-logs\") pod \"placement-db-sync-zhgm4\" (UID: \"de47bb8c-ea83-4f9a-be28-5716b59d25ed\") " pod="openstack/placement-db-sync-zhgm4" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.283950 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.283968 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9c26ce5-defc-45b8-8c73-91ea71cd9554-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.283987 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fe3b0954-8978-4517-90b5-40cefa8d36a7-dns-svc\") pod \"dnsmasq-dns-f84976bdf-jk9zf\" (UID: \"fe3b0954-8978-4517-90b5-40cefa8d36a7\") " pod="openstack/dnsmasq-dns-f84976bdf-jk9zf" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.284012 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b995216-a8c4-418c-9a82-eff79ca5360c-scripts\") pod \"ceilometer-0\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " pod="openstack/ceilometer-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.284029 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de47bb8c-ea83-4f9a-be28-5716b59d25ed-scripts\") pod \"placement-db-sync-zhgm4\" (UID: \"de47bb8c-ea83-4f9a-be28-5716b59d25ed\") " pod="openstack/placement-db-sync-zhgm4" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.284044 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-horizon-secret-key\") pod \"horizon-647f7cc89f-n8zf8\" (UID: \"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b\") " pod="openstack/horizon-647f7cc89f-n8zf8" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.284061 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b995216-a8c4-418c-9a82-eff79ca5360c-run-httpd\") pod \"ceilometer-0\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " pod="openstack/ceilometer-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.284086 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vclv7\" (UniqueName: \"kubernetes.io/projected/c9c26ce5-defc-45b8-8c73-91ea71cd9554-kube-api-access-vclv7\") pod \"glance-default-external-api-0\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.284112 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: 
\"kubernetes.io/configmap/fe3b0954-8978-4517-90b5-40cefa8d36a7-ovsdbserver-nb\") pod \"dnsmasq-dns-f84976bdf-jk9zf\" (UID: \"fe3b0954-8978-4517-90b5-40cefa8d36a7\") " pod="openstack/dnsmasq-dns-f84976bdf-jk9zf" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.284131 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-config-data\") pod \"horizon-647f7cc89f-n8zf8\" (UID: \"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b\") " pod="openstack/horizon-647f7cc89f-n8zf8" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.284151 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de47bb8c-ea83-4f9a-be28-5716b59d25ed-config-data\") pod \"placement-db-sync-zhgm4\" (UID: \"de47bb8c-ea83-4f9a-be28-5716b59d25ed\") " pod="openstack/placement-db-sync-zhgm4" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.284166 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c9c26ce5-defc-45b8-8c73-91ea71cd9554-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.284185 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8tcgq\" (UniqueName: \"kubernetes.io/projected/6b995216-a8c4-418c-9a82-eff79ca5360c-kube-api-access-8tcgq\") pod \"ceilometer-0\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " pod="openstack/ceilometer-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.287845 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b995216-a8c4-418c-9a82-eff79ca5360c-log-httpd\") pod \"ceilometer-0\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " pod="openstack/ceilometer-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.296582 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe3b0954-8978-4517-90b5-40cefa8d36a7-config\") pod \"dnsmasq-dns-f84976bdf-jk9zf\" (UID: \"fe3b0954-8978-4517-90b5-40cefa8d36a7\") " pod="openstack/dnsmasq-dns-f84976bdf-jk9zf" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.297674 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fe3b0954-8978-4517-90b5-40cefa8d36a7-ovsdbserver-nb\") pod \"dnsmasq-dns-f84976bdf-jk9zf\" (UID: \"fe3b0954-8978-4517-90b5-40cefa8d36a7\") " pod="openstack/dnsmasq-dns-f84976bdf-jk9zf" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.297941 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b995216-a8c4-418c-9a82-eff79ca5360c-run-httpd\") pod \"ceilometer-0\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " pod="openstack/ceilometer-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.304755 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-scripts\") pod \"horizon-647f7cc89f-n8zf8\" (UID: \"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b\") " pod="openstack/horizon-647f7cc89f-n8zf8" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 
01:31:39.305256 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-logs\") pod \"horizon-647f7cc89f-n8zf8\" (UID: \"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b\") " pod="openstack/horizon-647f7cc89f-n8zf8" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.313805 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de47bb8c-ea83-4f9a-be28-5716b59d25ed-logs\") pod \"placement-db-sync-zhgm4\" (UID: \"de47bb8c-ea83-4f9a-be28-5716b59d25ed\") " pod="openstack/placement-db-sync-zhgm4" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.317189 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fe3b0954-8978-4517-90b5-40cefa8d36a7-dns-svc\") pod \"dnsmasq-dns-f84976bdf-jk9zf\" (UID: \"fe3b0954-8978-4517-90b5-40cefa8d36a7\") " pod="openstack/dnsmasq-dns-f84976bdf-jk9zf" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.318552 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fe3b0954-8978-4517-90b5-40cefa8d36a7-ovsdbserver-sb\") pod \"dnsmasq-dns-f84976bdf-jk9zf\" (UID: \"fe3b0954-8978-4517-90b5-40cefa8d36a7\") " pod="openstack/dnsmasq-dns-f84976bdf-jk9zf" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.335047 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-config-data\") pod \"horizon-647f7cc89f-n8zf8\" (UID: \"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b\") " pod="openstack/horizon-647f7cc89f-n8zf8" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.335522 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnwsx\" (UniqueName: \"kubernetes.io/projected/72b6fbf7-1bb1-45c3-97a1-61da90bd1a92-kube-api-access-vnwsx\") pod \"barbican-db-sync-267l7\" (UID: \"72b6fbf7-1bb1-45c3-97a1-61da90bd1a92\") " pod="openstack/barbican-db-sync-267l7" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.360549 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de47bb8c-ea83-4f9a-be28-5716b59d25ed-config-data\") pod \"placement-db-sync-zhgm4\" (UID: \"de47bb8c-ea83-4f9a-be28-5716b59d25ed\") " pod="openstack/placement-db-sync-zhgm4" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.364375 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b995216-a8c4-418c-9a82-eff79ca5360c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " pod="openstack/ceilometer-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.366337 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de47bb8c-ea83-4f9a-be28-5716b59d25ed-combined-ca-bundle\") pod \"placement-db-sync-zhgm4\" (UID: \"de47bb8c-ea83-4f9a-be28-5716b59d25ed\") " pod="openstack/placement-db-sync-zhgm4" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.373663 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b995216-a8c4-418c-9a82-eff79ca5360c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: 
\"6b995216-a8c4-418c-9a82-eff79ca5360c\") " pod="openstack/ceilometer-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.374576 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b995216-a8c4-418c-9a82-eff79ca5360c-scripts\") pod \"ceilometer-0\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " pod="openstack/ceilometer-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.378774 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nzxrv\" (UniqueName: \"kubernetes.io/projected/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-kube-api-access-nzxrv\") pod \"horizon-647f7cc89f-n8zf8\" (UID: \"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b\") " pod="openstack/horizon-647f7cc89f-n8zf8" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.381815 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de47bb8c-ea83-4f9a-be28-5716b59d25ed-scripts\") pod \"placement-db-sync-zhgm4\" (UID: \"de47bb8c-ea83-4f9a-be28-5716b59d25ed\") " pod="openstack/placement-db-sync-zhgm4" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.382414 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b995216-a8c4-418c-9a82-eff79ca5360c-config-data\") pod \"ceilometer-0\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " pod="openstack/ceilometer-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.382768 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8tcgq\" (UniqueName: \"kubernetes.io/projected/6b995216-a8c4-418c-9a82-eff79ca5360c-kube-api-access-8tcgq\") pod \"ceilometer-0\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " pod="openstack/ceilometer-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.384867 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vclv7\" (UniqueName: \"kubernetes.io/projected/c9c26ce5-defc-45b8-8c73-91ea71cd9554-kube-api-access-vclv7\") pod \"glance-default-external-api-0\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.384920 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c9c26ce5-defc-45b8-8c73-91ea71cd9554-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.384978 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9c26ce5-defc-45b8-8c73-91ea71cd9554-scripts\") pod \"glance-default-external-api-0\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.385017 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9c26ce5-defc-45b8-8c73-91ea71cd9554-config-data\") pod \"glance-default-external-api-0\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.385050 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" 
(UniqueName: \"kubernetes.io/empty-dir/c9c26ce5-defc-45b8-8c73-91ea71cd9554-logs\") pod \"glance-default-external-api-0\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.385071 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.385087 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9c26ce5-defc-45b8-8c73-91ea71cd9554-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.388257 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bghqz\" (UniqueName: \"kubernetes.io/projected/de47bb8c-ea83-4f9a-be28-5716b59d25ed-kube-api-access-bghqz\") pod \"placement-db-sync-zhgm4\" (UID: \"de47bb8c-ea83-4f9a-be28-5716b59d25ed\") " pod="openstack/placement-db-sync-zhgm4" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.389101 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c9c26ce5-defc-45b8-8c73-91ea71cd9554-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.389399 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9c26ce5-defc-45b8-8c73-91ea71cd9554-logs\") pod \"glance-default-external-api-0\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.393694 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9c26ce5-defc-45b8-8c73-91ea71cd9554-scripts\") pod \"glance-default-external-api-0\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.397724 4665 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-external-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.424057 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vclv7\" (UniqueName: \"kubernetes.io/projected/c9c26ce5-defc-45b8-8c73-91ea71cd9554-kube-api-access-vclv7\") pod \"glance-default-external-api-0\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.469728 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-horizon-secret-key\") pod 
\"horizon-647f7cc89f-n8zf8\" (UID: \"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b\") " pod="openstack/horizon-647f7cc89f-n8zf8" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.470571 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gtn7m\" (UniqueName: \"kubernetes.io/projected/fe3b0954-8978-4517-90b5-40cefa8d36a7-kube-api-access-gtn7m\") pod \"dnsmasq-dns-f84976bdf-jk9zf\" (UID: \"fe3b0954-8978-4517-90b5-40cefa8d36a7\") " pod="openstack/dnsmasq-dns-f84976bdf-jk9zf" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.482744 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9c26ce5-defc-45b8-8c73-91ea71cd9554-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.485419 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9c26ce5-defc-45b8-8c73-91ea71cd9554-config-data\") pod \"glance-default-external-api-0\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.508411 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-267l7" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.508557 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.574525 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.585872 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-zhgm4" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.692819 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-647f7cc89f-n8zf8" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.695268 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.737365 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.772818 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.778512 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.789418 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f84976bdf-jk9zf" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.836951 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a394b847-cc03-44bf-bb3a-87f69cb6921f-logs\") pod \"glance-default-internal-api-0\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.837002 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a394b847-cc03-44bf-bb3a-87f69cb6921f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.837069 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a394b847-cc03-44bf-bb3a-87f69cb6921f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.837118 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a394b847-cc03-44bf-bb3a-87f69cb6921f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.837143 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.837181 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a394b847-cc03-44bf-bb3a-87f69cb6921f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.837206 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bhxzx\" (UniqueName: \"kubernetes.io/projected/a394b847-cc03-44bf-bb3a-87f69cb6921f-kube-api-access-bhxzx\") pod \"glance-default-internal-api-0\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.960951 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.967942 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a394b847-cc03-44bf-bb3a-87f69cb6921f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.972218 4665 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a394b847-cc03-44bf-bb3a-87f69cb6921f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.972470 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.972862 4665 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.973608 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a394b847-cc03-44bf-bb3a-87f69cb6921f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.973721 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bhxzx\" (UniqueName: \"kubernetes.io/projected/a394b847-cc03-44bf-bb3a-87f69cb6921f-kube-api-access-bhxzx\") pod \"glance-default-internal-api-0\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.973832 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a394b847-cc03-44bf-bb3a-87f69cb6921f-logs\") pod \"glance-default-internal-api-0\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.973883 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a394b847-cc03-44bf-bb3a-87f69cb6921f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.974506 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a394b847-cc03-44bf-bb3a-87f69cb6921f-logs\") pod \"glance-default-internal-api-0\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.980467 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a394b847-cc03-44bf-bb3a-87f69cb6921f-config-data\") pod \"glance-default-internal-api-0\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:39 crc kubenswrapper[4665]: I1205 01:31:39.991328 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/a394b847-cc03-44bf-bb3a-87f69cb6921f-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:39.993083 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a394b847-cc03-44bf-bb3a-87f69cb6921f-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:39.990849 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a394b847-cc03-44bf-bb3a-87f69cb6921f-scripts\") pod \"glance-default-internal-api-0\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.023996 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bhxzx\" (UniqueName: \"kubernetes.io/projected/a394b847-cc03-44bf-bb3a-87f69cb6921f-kube-api-access-bhxzx\") pod \"glance-default-internal-api-0\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.037412 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-dqbqb"] Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.046956 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dqbqb" event={"ID":"2c7e82db-d9a2-4e59-8613-e58e9abfce55","Type":"ContainerStarted","Data":"a11a9cc5eba76cdaa0e1e0363cdb18acc372dc994945fd6df12e9ef368e095cf"} Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.047229 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-f877ddd87-l6gbw" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.081520 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f84976bdf-jk9zf"] Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.148102 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-d8qlp"] Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.152061 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.155177 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.166808 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.186563 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-d8qlp"] Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.188875 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.262614 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-l6gbw"] Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.271863 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-f877ddd87-l6gbw"] Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.283696 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-777959986f-xdlws"] Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.287109 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n4rb2\" (UniqueName: \"kubernetes.io/projected/bc826875-4d2d-4dd2-a827-852a00d33450-kube-api-access-n4rb2\") pod \"dnsmasq-dns-785d8bcb8c-d8qlp\" (UID: \"bc826875-4d2d-4dd2-a827-852a00d33450\") " pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.287181 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-config\") pod \"dnsmasq-dns-785d8bcb8c-d8qlp\" (UID: \"bc826875-4d2d-4dd2-a827-852a00d33450\") " pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.287227 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-d8qlp\" (UID: \"bc826875-4d2d-4dd2-a827-852a00d33450\") " pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.287271 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-d8qlp\" (UID: \"bc826875-4d2d-4dd2-a827-852a00d33450\") " pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.287337 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-d8qlp\" (UID: \"bc826875-4d2d-4dd2-a827-852a00d33450\") " pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.287355 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-d8qlp\" (UID: \"bc826875-4d2d-4dd2-a827-852a00d33450\") " pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.385621 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-rkvs6"] Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.388416 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-config\") pod \"dnsmasq-dns-785d8bcb8c-d8qlp\" (UID: \"bc826875-4d2d-4dd2-a827-852a00d33450\") " pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" Dec 05 01:31:40 crc 
kubenswrapper[4665]: I1205 01:31:40.388480 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-d8qlp\" (UID: \"bc826875-4d2d-4dd2-a827-852a00d33450\") " pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.388516 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-d8qlp\" (UID: \"bc826875-4d2d-4dd2-a827-852a00d33450\") " pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.388550 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-d8qlp\" (UID: \"bc826875-4d2d-4dd2-a827-852a00d33450\") " pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.388572 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-d8qlp\" (UID: \"bc826875-4d2d-4dd2-a827-852a00d33450\") " pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.388638 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n4rb2\" (UniqueName: \"kubernetes.io/projected/bc826875-4d2d-4dd2-a827-852a00d33450-kube-api-access-n4rb2\") pod \"dnsmasq-dns-785d8bcb8c-d8qlp\" (UID: \"bc826875-4d2d-4dd2-a827-852a00d33450\") " pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.389780 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-config\") pod \"dnsmasq-dns-785d8bcb8c-d8qlp\" (UID: \"bc826875-4d2d-4dd2-a827-852a00d33450\") " pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.390865 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-dns-swift-storage-0\") pod \"dnsmasq-dns-785d8bcb8c-d8qlp\" (UID: \"bc826875-4d2d-4dd2-a827-852a00d33450\") " pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.391696 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-ovsdbserver-sb\") pod \"dnsmasq-dns-785d8bcb8c-d8qlp\" (UID: \"bc826875-4d2d-4dd2-a827-852a00d33450\") " pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.392267 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-dns-svc\") pod \"dnsmasq-dns-785d8bcb8c-d8qlp\" (UID: \"bc826875-4d2d-4dd2-a827-852a00d33450\") " pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.394153 4665 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-ovsdbserver-nb\") pod \"dnsmasq-dns-785d8bcb8c-d8qlp\" (UID: \"bc826875-4d2d-4dd2-a827-852a00d33450\") " pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.425109 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n4rb2\" (UniqueName: \"kubernetes.io/projected/bc826875-4d2d-4dd2-a827-852a00d33450-kube-api-access-n4rb2\") pod \"dnsmasq-dns-785d8bcb8c-d8qlp\" (UID: \"bc826875-4d2d-4dd2-a827-852a00d33450\") " pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.481192 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.515564 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-v6sj2"] Dec 05 01:31:40 crc kubenswrapper[4665]: I1205 01:31:40.558471 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-784f69c749-swzpt"] Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:40.886113 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:40.922000 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1b2568dd-1397-4d8a-90af-a5b8e81f58a3" path="/var/lib/kubelet/pods/1b2568dd-1397-4d8a-90af-a5b8e81f58a3/volumes" Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:40.922447 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-zhgm4"] Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:40.929619 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-267l7"] Dec 05 01:31:42 crc kubenswrapper[4665]: W1205 01:31:40.940341 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6b995216_a8c4_418c_9a82_eff79ca5360c.slice/crio-60a08a924e26a8d8ab25f853125cda0bd7de6cfcef34172f6900b38eb44285f5 WatchSource:0}: Error finding container 60a08a924e26a8d8ab25f853125cda0bd7de6cfcef34172f6900b38eb44285f5: Status 404 returned error can't find the container with id 60a08a924e26a8d8ab25f853125cda0bd7de6cfcef34172f6900b38eb44285f5 Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:41.175621 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-zhgm4" event={"ID":"de47bb8c-ea83-4f9a-be28-5716b59d25ed","Type":"ContainerStarted","Data":"66695e214d82faf35292bd21e901d0eccb28a52dc2a415910624784178b1a0c9"} Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:41.177798 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-784f69c749-swzpt" event={"ID":"f47233c0-8438-464e-852c-6cbf9ff63a59","Type":"ContainerStarted","Data":"a8092f2a6fc8dc215e6aa89f1c28b4c501d95db48fdfeadf3a65ddf089695a08"} Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:41.183778 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dqbqb" event={"ID":"2c7e82db-d9a2-4e59-8613-e58e9abfce55","Type":"ContainerStarted","Data":"1564ac14287cf31d6097d9826fb090d1656d825413fdc3a01781fda810787970"} Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:41.187438 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"6b995216-a8c4-418c-9a82-eff79ca5360c","Type":"ContainerStarted","Data":"60a08a924e26a8d8ab25f853125cda0bd7de6cfcef34172f6900b38eb44285f5"} Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:41.190507 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-267l7" event={"ID":"72b6fbf7-1bb1-45c3-97a1-61da90bd1a92","Type":"ContainerStarted","Data":"5bf65a4c44acd7eb92d14e2196872128112c35ad8a18a3d242e49d9e35964535"} Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:41.192707 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-777959986f-xdlws" event={"ID":"2f28d1d6-99d2-4793-a5da-305accdaad6f","Type":"ContainerStarted","Data":"fd29b27315f2efa1c501af155b7c96bab37801793801f99355497ff5524988db"} Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:41.227927 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-rkvs6" event={"ID":"f2a58335-982b-42ff-933c-f93d38fbb197","Type":"ContainerStarted","Data":"32671ec205d89a6ecbd15bb1aab71dd1f410bb3c2ab3195e0c53e89a3e9c3669"} Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:41.233111 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:41.238424 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-dqbqb" podStartSLOduration=4.238404023 podStartE2EDuration="4.238404023s" podCreationTimestamp="2025-12-05 01:31:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:31:41.218196929 +0000 UTC m=+1276.557589248" watchObservedRunningTime="2025-12-05 01:31:41.238404023 +0000 UTC m=+1276.577796322" Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:41.247748 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-v6sj2" event={"ID":"a032ea63-dc16-4378-b848-9a4f1274f860","Type":"ContainerStarted","Data":"0b82d339c396515b01062969fff95ea69fa6347a09ee4d84f605d6981b6caad7"} Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.099686 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.151907 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-647f7cc89f-n8zf8"] Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.223047 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-6449c7cf9c-cwrn8"] Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.224955 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-6449c7cf9c-cwrn8" Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.285380 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.334235 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c9c26ce5-defc-45b8-8c73-91ea71cd9554","Type":"ContainerStarted","Data":"92d4469737da68a693b470106407634566bd434a1c440e5023ded298264226b3"} Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.337965 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6449c7cf9c-cwrn8"] Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.339051 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-v6sj2" event={"ID":"a032ea63-dc16-4378-b848-9a4f1274f860","Type":"ContainerStarted","Data":"622e5e533523abb21b92b9f2a80cae4cce61914fe581467d07841e59ba2b7bc7"} Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.406991 4665 generic.go:334] "Generic (PLEG): container finished" podID="f47233c0-8438-464e-852c-6cbf9ff63a59" containerID="3716224cffb496865ed906163f3fb14141d8c2424bf5b0f2c872340f9f9ee728" exitCode=0 Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.408029 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-784f69c749-swzpt" event={"ID":"f47233c0-8438-464e-852c-6cbf9ff63a59","Type":"ContainerDied","Data":"3716224cffb496865ed906163f3fb14141d8c2424bf5b0f2c872340f9f9ee728"} Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.408724 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5ec4949b-8f9e-4c8f-8070-546ec52b3213-horizon-secret-key\") pod \"horizon-6449c7cf9c-cwrn8\" (UID: \"5ec4949b-8f9e-4c8f-8070-546ec52b3213\") " pod="openstack/horizon-6449c7cf9c-cwrn8" Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.408781 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jtc2\" (UniqueName: \"kubernetes.io/projected/5ec4949b-8f9e-4c8f-8070-546ec52b3213-kube-api-access-5jtc2\") pod \"horizon-6449c7cf9c-cwrn8\" (UID: \"5ec4949b-8f9e-4c8f-8070-546ec52b3213\") " pod="openstack/horizon-6449c7cf9c-cwrn8" Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.408830 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5ec4949b-8f9e-4c8f-8070-546ec52b3213-config-data\") pod \"horizon-6449c7cf9c-cwrn8\" (UID: \"5ec4949b-8f9e-4c8f-8070-546ec52b3213\") " pod="openstack/horizon-6449c7cf9c-cwrn8" Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.408866 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ec4949b-8f9e-4c8f-8070-546ec52b3213-logs\") pod \"horizon-6449c7cf9c-cwrn8\" (UID: \"5ec4949b-8f9e-4c8f-8070-546ec52b3213\") " pod="openstack/horizon-6449c7cf9c-cwrn8" Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.408902 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5ec4949b-8f9e-4c8f-8070-546ec52b3213-scripts\") pod \"horizon-6449c7cf9c-cwrn8\" (UID: \"5ec4949b-8f9e-4c8f-8070-546ec52b3213\") " 
pod="openstack/horizon-6449c7cf9c-cwrn8" Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.445989 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-v6sj2" podStartSLOduration=4.44596714 podStartE2EDuration="4.44596714s" podCreationTimestamp="2025-12-05 01:31:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:31:42.395873058 +0000 UTC m=+1277.735265357" watchObservedRunningTime="2025-12-05 01:31:42.44596714 +0000 UTC m=+1277.785359439" Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.510227 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ec4949b-8f9e-4c8f-8070-546ec52b3213-logs\") pod \"horizon-6449c7cf9c-cwrn8\" (UID: \"5ec4949b-8f9e-4c8f-8070-546ec52b3213\") " pod="openstack/horizon-6449c7cf9c-cwrn8" Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.510339 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5ec4949b-8f9e-4c8f-8070-546ec52b3213-scripts\") pod \"horizon-6449c7cf9c-cwrn8\" (UID: \"5ec4949b-8f9e-4c8f-8070-546ec52b3213\") " pod="openstack/horizon-6449c7cf9c-cwrn8" Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.510444 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5ec4949b-8f9e-4c8f-8070-546ec52b3213-horizon-secret-key\") pod \"horizon-6449c7cf9c-cwrn8\" (UID: \"5ec4949b-8f9e-4c8f-8070-546ec52b3213\") " pod="openstack/horizon-6449c7cf9c-cwrn8" Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.510542 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jtc2\" (UniqueName: \"kubernetes.io/projected/5ec4949b-8f9e-4c8f-8070-546ec52b3213-kube-api-access-5jtc2\") pod \"horizon-6449c7cf9c-cwrn8\" (UID: \"5ec4949b-8f9e-4c8f-8070-546ec52b3213\") " pod="openstack/horizon-6449c7cf9c-cwrn8" Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.510649 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5ec4949b-8f9e-4c8f-8070-546ec52b3213-config-data\") pod \"horizon-6449c7cf9c-cwrn8\" (UID: \"5ec4949b-8f9e-4c8f-8070-546ec52b3213\") " pod="openstack/horizon-6449c7cf9c-cwrn8" Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.514638 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ec4949b-8f9e-4c8f-8070-546ec52b3213-logs\") pod \"horizon-6449c7cf9c-cwrn8\" (UID: \"5ec4949b-8f9e-4c8f-8070-546ec52b3213\") " pod="openstack/horizon-6449c7cf9c-cwrn8" Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.517176 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5ec4949b-8f9e-4c8f-8070-546ec52b3213-scripts\") pod \"horizon-6449c7cf9c-cwrn8\" (UID: \"5ec4949b-8f9e-4c8f-8070-546ec52b3213\") " pod="openstack/horizon-6449c7cf9c-cwrn8" Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.520482 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5ec4949b-8f9e-4c8f-8070-546ec52b3213-config-data\") pod \"horizon-6449c7cf9c-cwrn8\" (UID: \"5ec4949b-8f9e-4c8f-8070-546ec52b3213\") " 
pod="openstack/horizon-6449c7cf9c-cwrn8" Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.540110 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jtc2\" (UniqueName: \"kubernetes.io/projected/5ec4949b-8f9e-4c8f-8070-546ec52b3213-kube-api-access-5jtc2\") pod \"horizon-6449c7cf9c-cwrn8\" (UID: \"5ec4949b-8f9e-4c8f-8070-546ec52b3213\") " pod="openstack/horizon-6449c7cf9c-cwrn8" Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.554069 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5ec4949b-8f9e-4c8f-8070-546ec52b3213-horizon-secret-key\") pod \"horizon-6449c7cf9c-cwrn8\" (UID: \"5ec4949b-8f9e-4c8f-8070-546ec52b3213\") " pod="openstack/horizon-6449c7cf9c-cwrn8" Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.576164 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6449c7cf9c-cwrn8" Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.757072 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.816672 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f84976bdf-jk9zf"] Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.831357 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-d8qlp"] Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.836034 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-647f7cc89f-n8zf8"] Dec 05 01:31:42 crc kubenswrapper[4665]: I1205 01:31:42.974338 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 01:31:42 crc kubenswrapper[4665]: W1205 01:31:42.985636 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda394b847_cc03_44bf_bb3a_87f69cb6921f.slice/crio-70a534f4e8fe3d46b155e1b688cbece03c368baf0cefe0a0e5cd5709a7d8f23c WatchSource:0}: Error finding container 70a534f4e8fe3d46b155e1b688cbece03c368baf0cefe0a0e5cd5709a7d8f23c: Status 404 returned error can't find the container with id 70a534f4e8fe3d46b155e1b688cbece03c368baf0cefe0a0e5cd5709a7d8f23c Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.345247 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-6449c7cf9c-cwrn8"] Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.379600 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-784f69c749-swzpt" Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.535123 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6449c7cf9c-cwrn8" event={"ID":"5ec4949b-8f9e-4c8f-8070-546ec52b3213","Type":"ContainerStarted","Data":"75f37b24d2c1d569336bf552eb2e2718efe34ec6cd88446f0b385913f7847809"} Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.549137 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f47233c0-8438-464e-852c-6cbf9ff63a59-ovsdbserver-sb\") pod \"f47233c0-8438-464e-852c-6cbf9ff63a59\" (UID: \"f47233c0-8438-464e-852c-6cbf9ff63a59\") " Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.549271 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f47233c0-8438-464e-852c-6cbf9ff63a59-dns-svc\") pod \"f47233c0-8438-464e-852c-6cbf9ff63a59\" (UID: \"f47233c0-8438-464e-852c-6cbf9ff63a59\") " Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.549450 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kbm4m\" (UniqueName: \"kubernetes.io/projected/f47233c0-8438-464e-852c-6cbf9ff63a59-kube-api-access-kbm4m\") pod \"f47233c0-8438-464e-852c-6cbf9ff63a59\" (UID: \"f47233c0-8438-464e-852c-6cbf9ff63a59\") " Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.549555 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f47233c0-8438-464e-852c-6cbf9ff63a59-config\") pod \"f47233c0-8438-464e-852c-6cbf9ff63a59\" (UID: \"f47233c0-8438-464e-852c-6cbf9ff63a59\") " Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.549713 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f47233c0-8438-464e-852c-6cbf9ff63a59-ovsdbserver-nb\") pod \"f47233c0-8438-464e-852c-6cbf9ff63a59\" (UID: \"f47233c0-8438-464e-852c-6cbf9ff63a59\") " Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.573948 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c9c26ce5-defc-45b8-8c73-91ea71cd9554","Type":"ContainerStarted","Data":"fa2b36bab26bbdf9202f4eb7d9d52c39c0ae04e1b595deaa762be73416912664"} Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.625799 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f47233c0-8438-464e-852c-6cbf9ff63a59-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f47233c0-8438-464e-852c-6cbf9ff63a59" (UID: "f47233c0-8438-464e-852c-6cbf9ff63a59"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.654684 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f47233c0-8438-464e-852c-6cbf9ff63a59-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f47233c0-8438-464e-852c-6cbf9ff63a59" (UID: "f47233c0-8438-464e-852c-6cbf9ff63a59"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.663533 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f47233c0-8438-464e-852c-6cbf9ff63a59-kube-api-access-kbm4m" (OuterVolumeSpecName: "kube-api-access-kbm4m") pod "f47233c0-8438-464e-852c-6cbf9ff63a59" (UID: "f47233c0-8438-464e-852c-6cbf9ff63a59"). InnerVolumeSpecName "kube-api-access-kbm4m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.674791 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kbm4m\" (UniqueName: \"kubernetes.io/projected/f47233c0-8438-464e-852c-6cbf9ff63a59-kube-api-access-kbm4m\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.674964 4665 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f47233c0-8438-464e-852c-6cbf9ff63a59-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.675038 4665 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f47233c0-8438-464e-852c-6cbf9ff63a59-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.680951 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" event={"ID":"bc826875-4d2d-4dd2-a827-852a00d33450","Type":"ContainerStarted","Data":"f2c6a4bc8e2baedf98c5ef88496b9d8c0905093f7821110b993a6d4d1dc4ab93"} Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.745023 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a394b847-cc03-44bf-bb3a-87f69cb6921f","Type":"ContainerStarted","Data":"70a534f4e8fe3d46b155e1b688cbece03c368baf0cefe0a0e5cd5709a7d8f23c"} Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.747691 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f47233c0-8438-464e-852c-6cbf9ff63a59-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f47233c0-8438-464e-852c-6cbf9ff63a59" (UID: "f47233c0-8438-464e-852c-6cbf9ff63a59"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.757064 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f84976bdf-jk9zf" event={"ID":"fe3b0954-8978-4517-90b5-40cefa8d36a7","Type":"ContainerStarted","Data":"d07c2198acca30b73aad32b6c454765b36b70e32f406bad8e4006315b3f95fa8"} Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.768844 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f47233c0-8438-464e-852c-6cbf9ff63a59-config" (OuterVolumeSpecName: "config") pod "f47233c0-8438-464e-852c-6cbf9ff63a59" (UID: "f47233c0-8438-464e-852c-6cbf9ff63a59"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.778211 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f47233c0-8438-464e-852c-6cbf9ff63a59-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.778238 4665 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f47233c0-8438-464e-852c-6cbf9ff63a59-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.792381 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-647f7cc89f-n8zf8" event={"ID":"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b","Type":"ContainerStarted","Data":"a9027ac21efbd53a4eb4da0f54dd7ae8ffce154de1b91d15a7452c5a0e0d77d2"} Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.802433 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-784f69c749-swzpt" event={"ID":"f47233c0-8438-464e-852c-6cbf9ff63a59","Type":"ContainerDied","Data":"a8092f2a6fc8dc215e6aa89f1c28b4c501d95db48fdfeadf3a65ddf089695a08"} Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.802532 4665 scope.go:117] "RemoveContainer" containerID="3716224cffb496865ed906163f3fb14141d8c2424bf5b0f2c872340f9f9ee728" Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.811776 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-784f69c749-swzpt" Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.923230 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-784f69c749-swzpt"] Dec 05 01:31:43 crc kubenswrapper[4665]: I1205 01:31:43.932228 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-784f69c749-swzpt"] Dec 05 01:31:44 crc kubenswrapper[4665]: I1205 01:31:44.829643 4665 generic.go:334] "Generic (PLEG): container finished" podID="bc826875-4d2d-4dd2-a827-852a00d33450" containerID="c8b95daad92f45b4657d8bfcc7eaf749d8c87dda64698db67ae93e86362cdbbc" exitCode=0 Dec 05 01:31:44 crc kubenswrapper[4665]: I1205 01:31:44.830232 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" event={"ID":"bc826875-4d2d-4dd2-a827-852a00d33450","Type":"ContainerDied","Data":"c8b95daad92f45b4657d8bfcc7eaf749d8c87dda64698db67ae93e86362cdbbc"} Dec 05 01:31:44 crc kubenswrapper[4665]: I1205 01:31:44.830316 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" event={"ID":"bc826875-4d2d-4dd2-a827-852a00d33450","Type":"ContainerStarted","Data":"975581cab7f88367f9be540441a904f7f91637db276177605072f908ceaa3a20"} Dec 05 01:31:44 crc kubenswrapper[4665]: I1205 01:31:44.830671 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" Dec 05 01:31:44 crc kubenswrapper[4665]: I1205 01:31:44.865320 4665 generic.go:334] "Generic (PLEG): container finished" podID="fe3b0954-8978-4517-90b5-40cefa8d36a7" containerID="570c61e57c34ffd988c8dcb4bb2582857e5a257e8cdb5318b641da71356c9923" exitCode=0 Dec 05 01:31:44 crc kubenswrapper[4665]: I1205 01:31:44.865484 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f84976bdf-jk9zf" event={"ID":"fe3b0954-8978-4517-90b5-40cefa8d36a7","Type":"ContainerDied","Data":"570c61e57c34ffd988c8dcb4bb2582857e5a257e8cdb5318b641da71356c9923"} Dec 05 01:31:44 crc 
kubenswrapper[4665]: I1205 01:31:44.934422 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" podStartSLOduration=5.934406582 podStartE2EDuration="5.934406582s" podCreationTimestamp="2025-12-05 01:31:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:31:44.854231788 +0000 UTC m=+1280.193624087" watchObservedRunningTime="2025-12-05 01:31:44.934406582 +0000 UTC m=+1280.273798881" Dec 05 01:31:44 crc kubenswrapper[4665]: I1205 01:31:44.958774 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="c9c26ce5-defc-45b8-8c73-91ea71cd9554" containerName="glance-log" containerID="cri-o://fa2b36bab26bbdf9202f4eb7d9d52c39c0ae04e1b595deaa762be73416912664" gracePeriod=30 Dec 05 01:31:44 crc kubenswrapper[4665]: I1205 01:31:44.958882 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="c9c26ce5-defc-45b8-8c73-91ea71cd9554" containerName="glance-httpd" containerID="cri-o://9a4a208e54a416098ca9b533905eaca7b24ecd1f1b3334082fffe78254d4eb89" gracePeriod=30 Dec 05 01:31:44 crc kubenswrapper[4665]: I1205 01:31:44.959510 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f47233c0-8438-464e-852c-6cbf9ff63a59" path="/var/lib/kubelet/pods/f47233c0-8438-464e-852c-6cbf9ff63a59/volumes" Dec 05 01:31:44 crc kubenswrapper[4665]: I1205 01:31:44.959964 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c9c26ce5-defc-45b8-8c73-91ea71cd9554","Type":"ContainerStarted","Data":"9a4a208e54a416098ca9b533905eaca7b24ecd1f1b3334082fffe78254d4eb89"} Dec 05 01:31:44 crc kubenswrapper[4665]: I1205 01:31:44.988468 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a394b847-cc03-44bf-bb3a-87f69cb6921f","Type":"ContainerStarted","Data":"ea36421967e1492d011510de0c1b7f2344fada03a02a56af90c650df129ae784"} Dec 05 01:31:45 crc kubenswrapper[4665]: I1205 01:31:45.369703 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=6.369681837 podStartE2EDuration="6.369681837s" podCreationTimestamp="2025-12-05 01:31:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:31:45.364660256 +0000 UTC m=+1280.704052555" watchObservedRunningTime="2025-12-05 01:31:45.369681837 +0000 UTC m=+1280.709074136" Dec 05 01:31:45 crc kubenswrapper[4665]: I1205 01:31:45.726376 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f84976bdf-jk9zf" Dec 05 01:31:45 crc kubenswrapper[4665]: I1205 01:31:45.800541 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fe3b0954-8978-4517-90b5-40cefa8d36a7-ovsdbserver-sb\") pod \"fe3b0954-8978-4517-90b5-40cefa8d36a7\" (UID: \"fe3b0954-8978-4517-90b5-40cefa8d36a7\") " Dec 05 01:31:45 crc kubenswrapper[4665]: I1205 01:31:45.800704 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fe3b0954-8978-4517-90b5-40cefa8d36a7-ovsdbserver-nb\") pod \"fe3b0954-8978-4517-90b5-40cefa8d36a7\" (UID: \"fe3b0954-8978-4517-90b5-40cefa8d36a7\") " Dec 05 01:31:45 crc kubenswrapper[4665]: I1205 01:31:45.800762 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe3b0954-8978-4517-90b5-40cefa8d36a7-config\") pod \"fe3b0954-8978-4517-90b5-40cefa8d36a7\" (UID: \"fe3b0954-8978-4517-90b5-40cefa8d36a7\") " Dec 05 01:31:45 crc kubenswrapper[4665]: I1205 01:31:45.800781 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fe3b0954-8978-4517-90b5-40cefa8d36a7-dns-svc\") pod \"fe3b0954-8978-4517-90b5-40cefa8d36a7\" (UID: \"fe3b0954-8978-4517-90b5-40cefa8d36a7\") " Dec 05 01:31:45 crc kubenswrapper[4665]: I1205 01:31:45.800827 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gtn7m\" (UniqueName: \"kubernetes.io/projected/fe3b0954-8978-4517-90b5-40cefa8d36a7-kube-api-access-gtn7m\") pod \"fe3b0954-8978-4517-90b5-40cefa8d36a7\" (UID: \"fe3b0954-8978-4517-90b5-40cefa8d36a7\") " Dec 05 01:31:45 crc kubenswrapper[4665]: I1205 01:31:45.830161 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe3b0954-8978-4517-90b5-40cefa8d36a7-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "fe3b0954-8978-4517-90b5-40cefa8d36a7" (UID: "fe3b0954-8978-4517-90b5-40cefa8d36a7"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:31:45 crc kubenswrapper[4665]: I1205 01:31:45.832394 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fe3b0954-8978-4517-90b5-40cefa8d36a7-kube-api-access-gtn7m" (OuterVolumeSpecName: "kube-api-access-gtn7m") pod "fe3b0954-8978-4517-90b5-40cefa8d36a7" (UID: "fe3b0954-8978-4517-90b5-40cefa8d36a7"). InnerVolumeSpecName "kube-api-access-gtn7m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:31:45 crc kubenswrapper[4665]: I1205 01:31:45.834208 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe3b0954-8978-4517-90b5-40cefa8d36a7-config" (OuterVolumeSpecName: "config") pod "fe3b0954-8978-4517-90b5-40cefa8d36a7" (UID: "fe3b0954-8978-4517-90b5-40cefa8d36a7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:31:45 crc kubenswrapper[4665]: I1205 01:31:45.836680 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe3b0954-8978-4517-90b5-40cefa8d36a7-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "fe3b0954-8978-4517-90b5-40cefa8d36a7" (UID: "fe3b0954-8978-4517-90b5-40cefa8d36a7"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:31:45 crc kubenswrapper[4665]: I1205 01:31:45.903668 4665 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/fe3b0954-8978-4517-90b5-40cefa8d36a7-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:45 crc kubenswrapper[4665]: I1205 01:31:45.903699 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fe3b0954-8978-4517-90b5-40cefa8d36a7-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:45 crc kubenswrapper[4665]: I1205 01:31:45.903708 4665 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/fe3b0954-8978-4517-90b5-40cefa8d36a7-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:45 crc kubenswrapper[4665]: I1205 01:31:45.903719 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gtn7m\" (UniqueName: \"kubernetes.io/projected/fe3b0954-8978-4517-90b5-40cefa8d36a7-kube-api-access-gtn7m\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:45 crc kubenswrapper[4665]: I1205 01:31:45.911250 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fe3b0954-8978-4517-90b5-40cefa8d36a7-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "fe3b0954-8978-4517-90b5-40cefa8d36a7" (UID: "fe3b0954-8978-4517-90b5-40cefa8d36a7"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.005118 4665 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/fe3b0954-8978-4517-90b5-40cefa8d36a7-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.024456 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-f84976bdf-jk9zf" event={"ID":"fe3b0954-8978-4517-90b5-40cefa8d36a7","Type":"ContainerDied","Data":"d07c2198acca30b73aad32b6c454765b36b70e32f406bad8e4006315b3f95fa8"} Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.024791 4665 scope.go:117] "RemoveContainer" containerID="570c61e57c34ffd988c8dcb4bb2582857e5a257e8cdb5318b641da71356c9923" Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.025017 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-f84976bdf-jk9zf" Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.046580 4665 generic.go:334] "Generic (PLEG): container finished" podID="c9c26ce5-defc-45b8-8c73-91ea71cd9554" containerID="9a4a208e54a416098ca9b533905eaca7b24ecd1f1b3334082fffe78254d4eb89" exitCode=0 Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.046613 4665 generic.go:334] "Generic (PLEG): container finished" podID="c9c26ce5-defc-45b8-8c73-91ea71cd9554" containerID="fa2b36bab26bbdf9202f4eb7d9d52c39c0ae04e1b595deaa762be73416912664" exitCode=143 Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.046940 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c9c26ce5-defc-45b8-8c73-91ea71cd9554","Type":"ContainerDied","Data":"9a4a208e54a416098ca9b533905eaca7b24ecd1f1b3334082fffe78254d4eb89"} Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.046994 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c9c26ce5-defc-45b8-8c73-91ea71cd9554","Type":"ContainerDied","Data":"fa2b36bab26bbdf9202f4eb7d9d52c39c0ae04e1b595deaa762be73416912664"} Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.078565 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.122456 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-f84976bdf-jk9zf"] Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.132145 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-f84976bdf-jk9zf"] Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.207839 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9c26ce5-defc-45b8-8c73-91ea71cd9554-scripts\") pod \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.207891 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9c26ce5-defc-45b8-8c73-91ea71cd9554-combined-ca-bundle\") pod \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.207925 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9c26ce5-defc-45b8-8c73-91ea71cd9554-config-data\") pod \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.208021 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.208093 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vclv7\" (UniqueName: \"kubernetes.io/projected/c9c26ce5-defc-45b8-8c73-91ea71cd9554-kube-api-access-vclv7\") pod \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.208192 4665 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c9c26ce5-defc-45b8-8c73-91ea71cd9554-httpd-run\") pod \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.208219 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9c26ce5-defc-45b8-8c73-91ea71cd9554-logs\") pod \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\" (UID: \"c9c26ce5-defc-45b8-8c73-91ea71cd9554\") " Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.209152 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9c26ce5-defc-45b8-8c73-91ea71cd9554-logs" (OuterVolumeSpecName: "logs") pod "c9c26ce5-defc-45b8-8c73-91ea71cd9554" (UID: "c9c26ce5-defc-45b8-8c73-91ea71cd9554"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.209378 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c9c26ce5-defc-45b8-8c73-91ea71cd9554-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "c9c26ce5-defc-45b8-8c73-91ea71cd9554" (UID: "c9c26ce5-defc-45b8-8c73-91ea71cd9554"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.222416 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "c9c26ce5-defc-45b8-8c73-91ea71cd9554" (UID: "c9c26ce5-defc-45b8-8c73-91ea71cd9554"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.226495 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c9c26ce5-defc-45b8-8c73-91ea71cd9554-kube-api-access-vclv7" (OuterVolumeSpecName: "kube-api-access-vclv7") pod "c9c26ce5-defc-45b8-8c73-91ea71cd9554" (UID: "c9c26ce5-defc-45b8-8c73-91ea71cd9554"). InnerVolumeSpecName "kube-api-access-vclv7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.226971 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9c26ce5-defc-45b8-8c73-91ea71cd9554-scripts" (OuterVolumeSpecName: "scripts") pod "c9c26ce5-defc-45b8-8c73-91ea71cd9554" (UID: "c9c26ce5-defc-45b8-8c73-91ea71cd9554"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.275137 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9c26ce5-defc-45b8-8c73-91ea71cd9554-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c9c26ce5-defc-45b8-8c73-91ea71cd9554" (UID: "c9c26ce5-defc-45b8-8c73-91ea71cd9554"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.310335 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vclv7\" (UniqueName: \"kubernetes.io/projected/c9c26ce5-defc-45b8-8c73-91ea71cd9554-kube-api-access-vclv7\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.310590 4665 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/c9c26ce5-defc-45b8-8c73-91ea71cd9554-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.310602 4665 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c9c26ce5-defc-45b8-8c73-91ea71cd9554-logs\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.310610 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c9c26ce5-defc-45b8-8c73-91ea71cd9554-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.310617 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c9c26ce5-defc-45b8-8c73-91ea71cd9554-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.310638 4665 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.310352 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c9c26ce5-defc-45b8-8c73-91ea71cd9554-config-data" (OuterVolumeSpecName: "config-data") pod "c9c26ce5-defc-45b8-8c73-91ea71cd9554" (UID: "c9c26ce5-defc-45b8-8c73-91ea71cd9554"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.330463 4665 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.412098 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c9c26ce5-defc-45b8-8c73-91ea71cd9554-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.412131 4665 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:46 crc kubenswrapper[4665]: I1205 01:31:46.915633 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fe3b0954-8978-4517-90b5-40cefa8d36a7" path="/var/lib/kubelet/pods/fe3b0954-8978-4517-90b5-40cefa8d36a7/volumes" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.063204 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a394b847-cc03-44bf-bb3a-87f69cb6921f","Type":"ContainerStarted","Data":"26fc1a8a6c0d80a06ac855c6961a6d8729eef1c9a20ad2d6393091d212ab4c62"} Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.063418 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="a394b847-cc03-44bf-bb3a-87f69cb6921f" containerName="glance-log" containerID="cri-o://ea36421967e1492d011510de0c1b7f2344fada03a02a56af90c650df129ae784" gracePeriod=30 Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.063543 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="a394b847-cc03-44bf-bb3a-87f69cb6921f" containerName="glance-httpd" containerID="cri-o://26fc1a8a6c0d80a06ac855c6961a6d8729eef1c9a20ad2d6393091d212ab4c62" gracePeriod=30 Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.081240 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"c9c26ce5-defc-45b8-8c73-91ea71cd9554","Type":"ContainerDied","Data":"92d4469737da68a693b470106407634566bd434a1c440e5023ded298264226b3"} Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.081327 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.081335 4665 scope.go:117] "RemoveContainer" containerID="9a4a208e54a416098ca9b533905eaca7b24ecd1f1b3334082fffe78254d4eb89" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.101076 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=9.101051453 podStartE2EDuration="9.101051453s" podCreationTimestamp="2025-12-05 01:31:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:31:47.085054938 +0000 UTC m=+1282.424447237" watchObservedRunningTime="2025-12-05 01:31:47.101051453 +0000 UTC m=+1282.440443752" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.124670 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.136985 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.154239 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 01:31:47 crc kubenswrapper[4665]: E1205 01:31:47.155022 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f47233c0-8438-464e-852c-6cbf9ff63a59" containerName="init" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.155035 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="f47233c0-8438-464e-852c-6cbf9ff63a59" containerName="init" Dec 05 01:31:47 crc kubenswrapper[4665]: E1205 01:31:47.155055 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9c26ce5-defc-45b8-8c73-91ea71cd9554" containerName="glance-httpd" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.155061 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9c26ce5-defc-45b8-8c73-91ea71cd9554" containerName="glance-httpd" Dec 05 01:31:47 crc kubenswrapper[4665]: E1205 01:31:47.155072 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fe3b0954-8978-4517-90b5-40cefa8d36a7" containerName="init" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.155077 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="fe3b0954-8978-4517-90b5-40cefa8d36a7" containerName="init" Dec 05 01:31:47 crc kubenswrapper[4665]: E1205 01:31:47.155085 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c9c26ce5-defc-45b8-8c73-91ea71cd9554" containerName="glance-log" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.155091 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="c9c26ce5-defc-45b8-8c73-91ea71cd9554" containerName="glance-log" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.155237 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="f47233c0-8438-464e-852c-6cbf9ff63a59" containerName="init" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.155252 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9c26ce5-defc-45b8-8c73-91ea71cd9554" containerName="glance-log" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.155265 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="c9c26ce5-defc-45b8-8c73-91ea71cd9554" containerName="glance-httpd" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.155278 4665 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="fe3b0954-8978-4517-90b5-40cefa8d36a7" containerName="init" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.156218 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.160514 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.195476 4665 scope.go:117] "RemoveContainer" containerID="fa2b36bab26bbdf9202f4eb7d9d52c39c0ae04e1b595deaa762be73416912664" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.204865 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.329378 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.329435 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1a1511a-dace-4cdd-85f9-39a189eacf7c-config-data\") pod \"glance-default-external-api-0\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.329493 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1a1511a-dace-4cdd-85f9-39a189eacf7c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.329581 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1a1511a-dace-4cdd-85f9-39a189eacf7c-logs\") pod \"glance-default-external-api-0\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.329675 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1a1511a-dace-4cdd-85f9-39a189eacf7c-scripts\") pod \"glance-default-external-api-0\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.329800 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v9hqm\" (UniqueName: \"kubernetes.io/projected/b1a1511a-dace-4cdd-85f9-39a189eacf7c-kube-api-access-v9hqm\") pod \"glance-default-external-api-0\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.329905 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b1a1511a-dace-4cdd-85f9-39a189eacf7c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " 
pod="openstack/glance-default-external-api-0" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.431431 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1a1511a-dace-4cdd-85f9-39a189eacf7c-scripts\") pod \"glance-default-external-api-0\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.431539 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v9hqm\" (UniqueName: \"kubernetes.io/projected/b1a1511a-dace-4cdd-85f9-39a189eacf7c-kube-api-access-v9hqm\") pod \"glance-default-external-api-0\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.431601 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b1a1511a-dace-4cdd-85f9-39a189eacf7c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.431632 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.431678 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1a1511a-dace-4cdd-85f9-39a189eacf7c-config-data\") pod \"glance-default-external-api-0\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.431751 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1a1511a-dace-4cdd-85f9-39a189eacf7c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.431778 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1a1511a-dace-4cdd-85f9-39a189eacf7c-logs\") pod \"glance-default-external-api-0\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.432816 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b1a1511a-dace-4cdd-85f9-39a189eacf7c-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.432967 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1a1511a-dace-4cdd-85f9-39a189eacf7c-logs\") pod \"glance-default-external-api-0\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.434739 4665 operation_generator.go:580] 
"MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-external-api-0" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.444425 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1a1511a-dace-4cdd-85f9-39a189eacf7c-scripts\") pod \"glance-default-external-api-0\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.462101 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v9hqm\" (UniqueName: \"kubernetes.io/projected/b1a1511a-dace-4cdd-85f9-39a189eacf7c-kube-api-access-v9hqm\") pod \"glance-default-external-api-0\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.464946 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1a1511a-dace-4cdd-85f9-39a189eacf7c-config-data\") pod \"glance-default-external-api-0\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.466952 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1a1511a-dace-4cdd-85f9-39a189eacf7c-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.505984 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " pod="openstack/glance-default-external-api-0" Dec 05 01:31:47 crc kubenswrapper[4665]: I1205 01:31:47.796966 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 01:31:48 crc kubenswrapper[4665]: I1205 01:31:48.124182 4665 generic.go:334] "Generic (PLEG): container finished" podID="a394b847-cc03-44bf-bb3a-87f69cb6921f" containerID="26fc1a8a6c0d80a06ac855c6961a6d8729eef1c9a20ad2d6393091d212ab4c62" exitCode=0 Dec 05 01:31:48 crc kubenswrapper[4665]: I1205 01:31:48.124956 4665 generic.go:334] "Generic (PLEG): container finished" podID="a394b847-cc03-44bf-bb3a-87f69cb6921f" containerID="ea36421967e1492d011510de0c1b7f2344fada03a02a56af90c650df129ae784" exitCode=143 Dec 05 01:31:48 crc kubenswrapper[4665]: I1205 01:31:48.124241 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a394b847-cc03-44bf-bb3a-87f69cb6921f","Type":"ContainerDied","Data":"26fc1a8a6c0d80a06ac855c6961a6d8729eef1c9a20ad2d6393091d212ab4c62"} Dec 05 01:31:48 crc kubenswrapper[4665]: I1205 01:31:48.125121 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a394b847-cc03-44bf-bb3a-87f69cb6921f","Type":"ContainerDied","Data":"ea36421967e1492d011510de0c1b7f2344fada03a02a56af90c650df129ae784"} Dec 05 01:31:48 crc kubenswrapper[4665]: I1205 01:31:48.128070 4665 generic.go:334] "Generic (PLEG): container finished" podID="2c7e82db-d9a2-4e59-8613-e58e9abfce55" containerID="1564ac14287cf31d6097d9826fb090d1656d825413fdc3a01781fda810787970" exitCode=0 Dec 05 01:31:48 crc kubenswrapper[4665]: I1205 01:31:48.128130 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dqbqb" event={"ID":"2c7e82db-d9a2-4e59-8613-e58e9abfce55","Type":"ContainerDied","Data":"1564ac14287cf31d6097d9826fb090d1656d825413fdc3a01781fda810787970"} Dec 05 01:31:48 crc kubenswrapper[4665]: I1205 01:31:48.916170 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c9c26ce5-defc-45b8-8c73-91ea71cd9554" path="/var/lib/kubelet/pods/c9c26ce5-defc-45b8-8c73-91ea71cd9554/volumes" Dec 05 01:31:50 crc kubenswrapper[4665]: I1205 01:31:50.483689 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" Dec 05 01:31:50 crc kubenswrapper[4665]: I1205 01:31:50.539201 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-zpx6p"] Dec 05 01:31:50 crc kubenswrapper[4665]: I1205 01:31:50.539463 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-698758b865-zpx6p" podUID="b63eb623-410f-4130-bf07-845b294c89f1" containerName="dnsmasq-dns" containerID="cri-o://724bd78baf4d982d3d09ab22b8646f289386612dd41914ff591e5e2aec6c09fb" gracePeriod=10 Dec 05 01:31:51 crc kubenswrapper[4665]: I1205 01:31:51.160449 4665 generic.go:334] "Generic (PLEG): container finished" podID="b63eb623-410f-4130-bf07-845b294c89f1" containerID="724bd78baf4d982d3d09ab22b8646f289386612dd41914ff591e5e2aec6c09fb" exitCode=0 Dec 05 01:31:51 crc kubenswrapper[4665]: I1205 01:31:51.160602 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-zpx6p" event={"ID":"b63eb623-410f-4130-bf07-845b294c89f1","Type":"ContainerDied","Data":"724bd78baf4d982d3d09ab22b8646f289386612dd41914ff591e5e2aec6c09fb"} Dec 05 01:31:51 crc kubenswrapper[4665]: I1205 01:31:51.391133 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 01:31:51 crc kubenswrapper[4665]: I1205 01:31:51.951529 4665 kubelet.go:2437] 
"SyncLoop DELETE" source="api" pods=["openstack/horizon-777959986f-xdlws"] Dec 05 01:31:51 crc kubenswrapper[4665]: I1205 01:31:51.992154 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-86cd4c9876-glfvx"] Dec 05 01:31:51 crc kubenswrapper[4665]: I1205 01:31:51.994355 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.001394 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.016076 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-86cd4c9876-glfvx"] Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.107749 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6449c7cf9c-cwrn8"] Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.134430 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2178a916-adc5-4ff5-8972-30b105320f5f-horizon-secret-key\") pod \"horizon-86cd4c9876-glfvx\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.134491 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gklsm\" (UniqueName: \"kubernetes.io/projected/2178a916-adc5-4ff5-8972-30b105320f5f-kube-api-access-gklsm\") pod \"horizon-86cd4c9876-glfvx\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.134516 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2178a916-adc5-4ff5-8972-30b105320f5f-logs\") pod \"horizon-86cd4c9876-glfvx\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.134587 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2178a916-adc5-4ff5-8972-30b105320f5f-scripts\") pod \"horizon-86cd4c9876-glfvx\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.134633 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2178a916-adc5-4ff5-8972-30b105320f5f-combined-ca-bundle\") pod \"horizon-86cd4c9876-glfvx\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.134650 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/2178a916-adc5-4ff5-8972-30b105320f5f-horizon-tls-certs\") pod \"horizon-86cd4c9876-glfvx\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.134668 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/2178a916-adc5-4ff5-8972-30b105320f5f-config-data\") pod \"horizon-86cd4c9876-glfvx\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.161716 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-644f785f4-mslbg"] Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.163510 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.193178 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-644f785f4-mslbg"] Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.237496 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2178a916-adc5-4ff5-8972-30b105320f5f-combined-ca-bundle\") pod \"horizon-86cd4c9876-glfvx\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.237545 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/2178a916-adc5-4ff5-8972-30b105320f5f-horizon-tls-certs\") pod \"horizon-86cd4c9876-glfvx\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.237579 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hktfw\" (UniqueName: \"kubernetes.io/projected/64407a72-3fdf-450f-b5c0-913ee74bb437-kube-api-access-hktfw\") pod \"horizon-644f785f4-mslbg\" (UID: \"64407a72-3fdf-450f-b5c0-913ee74bb437\") " pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.237611 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2178a916-adc5-4ff5-8972-30b105320f5f-config-data\") pod \"horizon-86cd4c9876-glfvx\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.237647 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/64407a72-3fdf-450f-b5c0-913ee74bb437-horizon-secret-key\") pod \"horizon-644f785f4-mslbg\" (UID: \"64407a72-3fdf-450f-b5c0-913ee74bb437\") " pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.237679 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64407a72-3fdf-450f-b5c0-913ee74bb437-combined-ca-bundle\") pod \"horizon-644f785f4-mslbg\" (UID: \"64407a72-3fdf-450f-b5c0-913ee74bb437\") " pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.237722 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2178a916-adc5-4ff5-8972-30b105320f5f-horizon-secret-key\") pod \"horizon-86cd4c9876-glfvx\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.237751 4665 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/64407a72-3fdf-450f-b5c0-913ee74bb437-config-data\") pod \"horizon-644f785f4-mslbg\" (UID: \"64407a72-3fdf-450f-b5c0-913ee74bb437\") " pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.237794 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gklsm\" (UniqueName: \"kubernetes.io/projected/2178a916-adc5-4ff5-8972-30b105320f5f-kube-api-access-gklsm\") pod \"horizon-86cd4c9876-glfvx\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.237840 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2178a916-adc5-4ff5-8972-30b105320f5f-logs\") pod \"horizon-86cd4c9876-glfvx\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.237877 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/64407a72-3fdf-450f-b5c0-913ee74bb437-horizon-tls-certs\") pod \"horizon-644f785f4-mslbg\" (UID: \"64407a72-3fdf-450f-b5c0-913ee74bb437\") " pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.237905 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64407a72-3fdf-450f-b5c0-913ee74bb437-logs\") pod \"horizon-644f785f4-mslbg\" (UID: \"64407a72-3fdf-450f-b5c0-913ee74bb437\") " pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.237933 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2178a916-adc5-4ff5-8972-30b105320f5f-scripts\") pod \"horizon-86cd4c9876-glfvx\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.238243 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/64407a72-3fdf-450f-b5c0-913ee74bb437-scripts\") pod \"horizon-644f785f4-mslbg\" (UID: \"64407a72-3fdf-450f-b5c0-913ee74bb437\") " pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.238650 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2178a916-adc5-4ff5-8972-30b105320f5f-scripts\") pod \"horizon-86cd4c9876-glfvx\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.238738 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2178a916-adc5-4ff5-8972-30b105320f5f-logs\") pod \"horizon-86cd4c9876-glfvx\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.239202 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2178a916-adc5-4ff5-8972-30b105320f5f-config-data\") pod 
\"horizon-86cd4c9876-glfvx\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.250593 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/2178a916-adc5-4ff5-8972-30b105320f5f-horizon-tls-certs\") pod \"horizon-86cd4c9876-glfvx\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.250768 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2178a916-adc5-4ff5-8972-30b105320f5f-combined-ca-bundle\") pod \"horizon-86cd4c9876-glfvx\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.261235 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2178a916-adc5-4ff5-8972-30b105320f5f-horizon-secret-key\") pod \"horizon-86cd4c9876-glfvx\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.268663 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gklsm\" (UniqueName: \"kubernetes.io/projected/2178a916-adc5-4ff5-8972-30b105320f5f-kube-api-access-gklsm\") pod \"horizon-86cd4c9876-glfvx\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.340323 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/64407a72-3fdf-450f-b5c0-913ee74bb437-config-data\") pod \"horizon-644f785f4-mslbg\" (UID: \"64407a72-3fdf-450f-b5c0-913ee74bb437\") " pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.340653 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/64407a72-3fdf-450f-b5c0-913ee74bb437-horizon-tls-certs\") pod \"horizon-644f785f4-mslbg\" (UID: \"64407a72-3fdf-450f-b5c0-913ee74bb437\") " pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.340795 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64407a72-3fdf-450f-b5c0-913ee74bb437-logs\") pod \"horizon-644f785f4-mslbg\" (UID: \"64407a72-3fdf-450f-b5c0-913ee74bb437\") " pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.340897 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/64407a72-3fdf-450f-b5c0-913ee74bb437-scripts\") pod \"horizon-644f785f4-mslbg\" (UID: \"64407a72-3fdf-450f-b5c0-913ee74bb437\") " pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.341136 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hktfw\" (UniqueName: \"kubernetes.io/projected/64407a72-3fdf-450f-b5c0-913ee74bb437-kube-api-access-hktfw\") pod \"horizon-644f785f4-mslbg\" (UID: \"64407a72-3fdf-450f-b5c0-913ee74bb437\") " pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:31:52 crc kubenswrapper[4665]: 
I1205 01:31:52.341241 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/64407a72-3fdf-450f-b5c0-913ee74bb437-horizon-secret-key\") pod \"horizon-644f785f4-mslbg\" (UID: \"64407a72-3fdf-450f-b5c0-913ee74bb437\") " pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.341332 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64407a72-3fdf-450f-b5c0-913ee74bb437-combined-ca-bundle\") pod \"horizon-644f785f4-mslbg\" (UID: \"64407a72-3fdf-450f-b5c0-913ee74bb437\") " pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.341891 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/64407a72-3fdf-450f-b5c0-913ee74bb437-config-data\") pod \"horizon-644f785f4-mslbg\" (UID: \"64407a72-3fdf-450f-b5c0-913ee74bb437\") " pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.342046 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/64407a72-3fdf-450f-b5c0-913ee74bb437-scripts\") pod \"horizon-644f785f4-mslbg\" (UID: \"64407a72-3fdf-450f-b5c0-913ee74bb437\") " pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.342326 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/64407a72-3fdf-450f-b5c0-913ee74bb437-logs\") pod \"horizon-644f785f4-mslbg\" (UID: \"64407a72-3fdf-450f-b5c0-913ee74bb437\") " pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.342704 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.347590 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/64407a72-3fdf-450f-b5c0-913ee74bb437-combined-ca-bundle\") pod \"horizon-644f785f4-mslbg\" (UID: \"64407a72-3fdf-450f-b5c0-913ee74bb437\") " pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.354930 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/64407a72-3fdf-450f-b5c0-913ee74bb437-horizon-secret-key\") pod \"horizon-644f785f4-mslbg\" (UID: \"64407a72-3fdf-450f-b5c0-913ee74bb437\") " pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.360764 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/64407a72-3fdf-450f-b5c0-913ee74bb437-horizon-tls-certs\") pod \"horizon-644f785f4-mslbg\" (UID: \"64407a72-3fdf-450f-b5c0-913ee74bb437\") " pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.374605 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hktfw\" (UniqueName: \"kubernetes.io/projected/64407a72-3fdf-450f-b5c0-913ee74bb437-kube-api-access-hktfw\") pod \"horizon-644f785f4-mslbg\" (UID: \"64407a72-3fdf-450f-b5c0-913ee74bb437\") " pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:31:52 crc kubenswrapper[4665]: I1205 01:31:52.482799 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:31:54 crc kubenswrapper[4665]: I1205 01:31:54.670541 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-zpx6p" podUID="b63eb623-410f-4130-bf07-845b294c89f1" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.113:5353: connect: connection refused" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.212915 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.214207 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-dqbqb" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.239543 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"a394b847-cc03-44bf-bb3a-87f69cb6921f\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.239614 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a394b847-cc03-44bf-bb3a-87f69cb6921f-httpd-run\") pod \"a394b847-cc03-44bf-bb3a-87f69cb6921f\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.239671 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bhxzx\" (UniqueName: \"kubernetes.io/projected/a394b847-cc03-44bf-bb3a-87f69cb6921f-kube-api-access-bhxzx\") pod \"a394b847-cc03-44bf-bb3a-87f69cb6921f\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.239728 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-credential-keys\") pod \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\" (UID: \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\") " Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.239814 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a394b847-cc03-44bf-bb3a-87f69cb6921f-combined-ca-bundle\") pod \"a394b847-cc03-44bf-bb3a-87f69cb6921f\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.239856 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a394b847-cc03-44bf-bb3a-87f69cb6921f-scripts\") pod \"a394b847-cc03-44bf-bb3a-87f69cb6921f\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.239887 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-combined-ca-bundle\") pod \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\" (UID: \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\") " Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.239917 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-fernet-keys\") pod \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\" (UID: \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\") " Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.240007 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-config-data\") pod \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\" (UID: \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\") " Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.240046 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a394b847-cc03-44bf-bb3a-87f69cb6921f-config-data\") pod \"a394b847-cc03-44bf-bb3a-87f69cb6921f\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " Dec 05 01:31:57 
crc kubenswrapper[4665]: I1205 01:31:57.240095 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-scripts\") pod \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\" (UID: \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\") " Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.240124 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a394b847-cc03-44bf-bb3a-87f69cb6921f-logs\") pod \"a394b847-cc03-44bf-bb3a-87f69cb6921f\" (UID: \"a394b847-cc03-44bf-bb3a-87f69cb6921f\") " Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.240169 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jnjhd\" (UniqueName: \"kubernetes.io/projected/2c7e82db-d9a2-4e59-8613-e58e9abfce55-kube-api-access-jnjhd\") pod \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\" (UID: \"2c7e82db-d9a2-4e59-8613-e58e9abfce55\") " Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.248485 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c7e82db-d9a2-4e59-8613-e58e9abfce55-kube-api-access-jnjhd" (OuterVolumeSpecName: "kube-api-access-jnjhd") pod "2c7e82db-d9a2-4e59-8613-e58e9abfce55" (UID: "2c7e82db-d9a2-4e59-8613-e58e9abfce55"). InnerVolumeSpecName "kube-api-access-jnjhd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.274015 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a394b847-cc03-44bf-bb3a-87f69cb6921f-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "a394b847-cc03-44bf-bb3a-87f69cb6921f" (UID: "a394b847-cc03-44bf-bb3a-87f69cb6921f"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.277046 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a394b847-cc03-44bf-bb3a-87f69cb6921f-logs" (OuterVolumeSpecName: "logs") pod "a394b847-cc03-44bf-bb3a-87f69cb6921f" (UID: "a394b847-cc03-44bf-bb3a-87f69cb6921f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.293400 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a394b847-cc03-44bf-bb3a-87f69cb6921f-kube-api-access-bhxzx" (OuterVolumeSpecName: "kube-api-access-bhxzx") pod "a394b847-cc03-44bf-bb3a-87f69cb6921f" (UID: "a394b847-cc03-44bf-bb3a-87f69cb6921f"). InnerVolumeSpecName "kube-api-access-bhxzx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.295918 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a394b847-cc03-44bf-bb3a-87f69cb6921f-scripts" (OuterVolumeSpecName: "scripts") pod "a394b847-cc03-44bf-bb3a-87f69cb6921f" (UID: "a394b847-cc03-44bf-bb3a-87f69cb6921f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.296993 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-dqbqb" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.297045 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-dqbqb" event={"ID":"2c7e82db-d9a2-4e59-8613-e58e9abfce55","Type":"ContainerDied","Data":"a11a9cc5eba76cdaa0e1e0363cdb18acc372dc994945fd6df12e9ef368e095cf"} Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.297085 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a11a9cc5eba76cdaa0e1e0363cdb18acc372dc994945fd6df12e9ef368e095cf" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.300540 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "2c7e82db-d9a2-4e59-8613-e58e9abfce55" (UID: "2c7e82db-d9a2-4e59-8613-e58e9abfce55"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.311515 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-scripts" (OuterVolumeSpecName: "scripts") pod "2c7e82db-d9a2-4e59-8613-e58e9abfce55" (UID: "2c7e82db-d9a2-4e59-8613-e58e9abfce55"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.316400 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "2c7e82db-d9a2-4e59-8613-e58e9abfce55" (UID: "2c7e82db-d9a2-4e59-8613-e58e9abfce55"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.318511 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"a394b847-cc03-44bf-bb3a-87f69cb6921f","Type":"ContainerDied","Data":"70a534f4e8fe3d46b155e1b688cbece03c368baf0cefe0a0e5cd5709a7d8f23c"} Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.318557 4665 scope.go:117] "RemoveContainer" containerID="26fc1a8a6c0d80a06ac855c6961a6d8729eef1c9a20ad2d6393091d212ab4c62" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.318719 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.327008 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "a394b847-cc03-44bf-bb3a-87f69cb6921f" (UID: "a394b847-cc03-44bf-bb3a-87f69cb6921f"). InnerVolumeSpecName "local-storage07-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.342973 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.343023 4665 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a394b847-cc03-44bf-bb3a-87f69cb6921f-logs\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.343033 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jnjhd\" (UniqueName: \"kubernetes.io/projected/2c7e82db-d9a2-4e59-8613-e58e9abfce55-kube-api-access-jnjhd\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.343059 4665 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.343069 4665 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/a394b847-cc03-44bf-bb3a-87f69cb6921f-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.343079 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bhxzx\" (UniqueName: \"kubernetes.io/projected/a394b847-cc03-44bf-bb3a-87f69cb6921f-kube-api-access-bhxzx\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.343087 4665 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.343095 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a394b847-cc03-44bf-bb3a-87f69cb6921f-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.343103 4665 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.354811 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a394b847-cc03-44bf-bb3a-87f69cb6921f-config-data" (OuterVolumeSpecName: "config-data") pod "a394b847-cc03-44bf-bb3a-87f69cb6921f" (UID: "a394b847-cc03-44bf-bb3a-87f69cb6921f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.369446 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2c7e82db-d9a2-4e59-8613-e58e9abfce55" (UID: "2c7e82db-d9a2-4e59-8613-e58e9abfce55"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.378796 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-config-data" (OuterVolumeSpecName: "config-data") pod "2c7e82db-d9a2-4e59-8613-e58e9abfce55" (UID: "2c7e82db-d9a2-4e59-8613-e58e9abfce55"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.392419 4665 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.397025 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a394b847-cc03-44bf-bb3a-87f69cb6921f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a394b847-cc03-44bf-bb3a-87f69cb6921f" (UID: "a394b847-cc03-44bf-bb3a-87f69cb6921f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.444907 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.444943 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a394b847-cc03-44bf-bb3a-87f69cb6921f-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.444957 4665 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.444969 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a394b847-cc03-44bf-bb3a-87f69cb6921f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.444984 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2c7e82db-d9a2-4e59-8613-e58e9abfce55-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.688926 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.697913 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.712804 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 01:31:57 crc kubenswrapper[4665]: E1205 01:31:57.713437 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c7e82db-d9a2-4e59-8613-e58e9abfce55" containerName="keystone-bootstrap" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.713463 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c7e82db-d9a2-4e59-8613-e58e9abfce55" containerName="keystone-bootstrap" Dec 05 01:31:57 crc kubenswrapper[4665]: E1205 01:31:57.713483 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a394b847-cc03-44bf-bb3a-87f69cb6921f" 
containerName="glance-log" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.713492 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="a394b847-cc03-44bf-bb3a-87f69cb6921f" containerName="glance-log" Dec 05 01:31:57 crc kubenswrapper[4665]: E1205 01:31:57.713505 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a394b847-cc03-44bf-bb3a-87f69cb6921f" containerName="glance-httpd" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.713512 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="a394b847-cc03-44bf-bb3a-87f69cb6921f" containerName="glance-httpd" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.713767 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c7e82db-d9a2-4e59-8613-e58e9abfce55" containerName="keystone-bootstrap" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.713797 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="a394b847-cc03-44bf-bb3a-87f69cb6921f" containerName="glance-httpd" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.713808 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="a394b847-cc03-44bf-bb3a-87f69cb6921f" containerName="glance-log" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.715393 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.729907 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.730067 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.733994 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.851319 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7b0e53d-dba2-440d-844e-dd4ca34f1895-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.851395 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7b0e53d-dba2-440d-844e-dd4ca34f1895-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.851459 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7b0e53d-dba2-440d-844e-dd4ca34f1895-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.851484 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc 
kubenswrapper[4665]: I1205 01:31:57.851532 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7b0e53d-dba2-440d-844e-dd4ca34f1895-logs\") pod \"glance-default-internal-api-0\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.851557 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7b0e53d-dba2-440d-844e-dd4ca34f1895-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.851575 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sq22f\" (UniqueName: \"kubernetes.io/projected/f7b0e53d-dba2-440d-844e-dd4ca34f1895-kube-api-access-sq22f\") pod \"glance-default-internal-api-0\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.851595 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f7b0e53d-dba2-440d-844e-dd4ca34f1895-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.953355 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7b0e53d-dba2-440d-844e-dd4ca34f1895-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.953398 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sq22f\" (UniqueName: \"kubernetes.io/projected/f7b0e53d-dba2-440d-844e-dd4ca34f1895-kube-api-access-sq22f\") pod \"glance-default-internal-api-0\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.953424 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f7b0e53d-dba2-440d-844e-dd4ca34f1895-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.953462 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7b0e53d-dba2-440d-844e-dd4ca34f1895-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.953500 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7b0e53d-dba2-440d-844e-dd4ca34f1895-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc 
kubenswrapper[4665]: I1205 01:31:57.953551 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7b0e53d-dba2-440d-844e-dd4ca34f1895-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.953573 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.953618 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7b0e53d-dba2-440d-844e-dd4ca34f1895-logs\") pod \"glance-default-internal-api-0\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.954007 4665 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.954040 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f7b0e53d-dba2-440d-844e-dd4ca34f1895-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.954535 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7b0e53d-dba2-440d-844e-dd4ca34f1895-logs\") pod \"glance-default-internal-api-0\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.958234 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7b0e53d-dba2-440d-844e-dd4ca34f1895-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.960142 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7b0e53d-dba2-440d-844e-dd4ca34f1895-scripts\") pod \"glance-default-internal-api-0\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.966196 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7b0e53d-dba2-440d-844e-dd4ca34f1895-config-data\") pod \"glance-default-internal-api-0\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.968091 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/f7b0e53d-dba2-440d-844e-dd4ca34f1895-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.979722 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sq22f\" (UniqueName: \"kubernetes.io/projected/f7b0e53d-dba2-440d-844e-dd4ca34f1895-kube-api-access-sq22f\") pod \"glance-default-internal-api-0\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:57 crc kubenswrapper[4665]: I1205 01:31:57.986498 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.053757 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.341253 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-dqbqb"] Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.350075 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-dqbqb"] Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.445436 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-xkf6f"] Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.446607 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-xkf6f" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.449573 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-6ssd4" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.449626 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.449910 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.451504 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.456005 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.457439 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-xkf6f"] Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.562793 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-config-data\") pod \"keystone-bootstrap-xkf6f\" (UID: \"329fcb4a-b83f-4831-989a-584868907b9c\") " pod="openstack/keystone-bootstrap-xkf6f" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.562852 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-credential-keys\") pod \"keystone-bootstrap-xkf6f\" (UID: \"329fcb4a-b83f-4831-989a-584868907b9c\") " 
pod="openstack/keystone-bootstrap-xkf6f" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.562925 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-combined-ca-bundle\") pod \"keystone-bootstrap-xkf6f\" (UID: \"329fcb4a-b83f-4831-989a-584868907b9c\") " pod="openstack/keystone-bootstrap-xkf6f" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.562953 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cgbbd\" (UniqueName: \"kubernetes.io/projected/329fcb4a-b83f-4831-989a-584868907b9c-kube-api-access-cgbbd\") pod \"keystone-bootstrap-xkf6f\" (UID: \"329fcb4a-b83f-4831-989a-584868907b9c\") " pod="openstack/keystone-bootstrap-xkf6f" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.563048 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-fernet-keys\") pod \"keystone-bootstrap-xkf6f\" (UID: \"329fcb4a-b83f-4831-989a-584868907b9c\") " pod="openstack/keystone-bootstrap-xkf6f" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.563108 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-scripts\") pod \"keystone-bootstrap-xkf6f\" (UID: \"329fcb4a-b83f-4831-989a-584868907b9c\") " pod="openstack/keystone-bootstrap-xkf6f" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.665867 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-fernet-keys\") pod \"keystone-bootstrap-xkf6f\" (UID: \"329fcb4a-b83f-4831-989a-584868907b9c\") " pod="openstack/keystone-bootstrap-xkf6f" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.665933 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-scripts\") pod \"keystone-bootstrap-xkf6f\" (UID: \"329fcb4a-b83f-4831-989a-584868907b9c\") " pod="openstack/keystone-bootstrap-xkf6f" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.665959 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-config-data\") pod \"keystone-bootstrap-xkf6f\" (UID: \"329fcb4a-b83f-4831-989a-584868907b9c\") " pod="openstack/keystone-bootstrap-xkf6f" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.665982 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-credential-keys\") pod \"keystone-bootstrap-xkf6f\" (UID: \"329fcb4a-b83f-4831-989a-584868907b9c\") " pod="openstack/keystone-bootstrap-xkf6f" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.666040 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-combined-ca-bundle\") pod \"keystone-bootstrap-xkf6f\" (UID: \"329fcb4a-b83f-4831-989a-584868907b9c\") " pod="openstack/keystone-bootstrap-xkf6f" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 
01:31:58.666061 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cgbbd\" (UniqueName: \"kubernetes.io/projected/329fcb4a-b83f-4831-989a-584868907b9c-kube-api-access-cgbbd\") pod \"keystone-bootstrap-xkf6f\" (UID: \"329fcb4a-b83f-4831-989a-584868907b9c\") " pod="openstack/keystone-bootstrap-xkf6f" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.671952 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-combined-ca-bundle\") pod \"keystone-bootstrap-xkf6f\" (UID: \"329fcb4a-b83f-4831-989a-584868907b9c\") " pod="openstack/keystone-bootstrap-xkf6f" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.672180 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-fernet-keys\") pod \"keystone-bootstrap-xkf6f\" (UID: \"329fcb4a-b83f-4831-989a-584868907b9c\") " pod="openstack/keystone-bootstrap-xkf6f" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.672497 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-credential-keys\") pod \"keystone-bootstrap-xkf6f\" (UID: \"329fcb4a-b83f-4831-989a-584868907b9c\") " pod="openstack/keystone-bootstrap-xkf6f" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.690084 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-scripts\") pod \"keystone-bootstrap-xkf6f\" (UID: \"329fcb4a-b83f-4831-989a-584868907b9c\") " pod="openstack/keystone-bootstrap-xkf6f" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.690309 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cgbbd\" (UniqueName: \"kubernetes.io/projected/329fcb4a-b83f-4831-989a-584868907b9c-kube-api-access-cgbbd\") pod \"keystone-bootstrap-xkf6f\" (UID: \"329fcb4a-b83f-4831-989a-584868907b9c\") " pod="openstack/keystone-bootstrap-xkf6f" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.690816 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-config-data\") pod \"keystone-bootstrap-xkf6f\" (UID: \"329fcb4a-b83f-4831-989a-584868907b9c\") " pod="openstack/keystone-bootstrap-xkf6f" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.769725 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-xkf6f" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.904251 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c7e82db-d9a2-4e59-8613-e58e9abfce55" path="/var/lib/kubelet/pods/2c7e82db-d9a2-4e59-8613-e58e9abfce55/volumes" Dec 05 01:31:58 crc kubenswrapper[4665]: I1205 01:31:58.905246 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a394b847-cc03-44bf-bb3a-87f69cb6921f" path="/var/lib/kubelet/pods/a394b847-cc03-44bf-bb3a-87f69cb6921f/volumes" Dec 05 01:32:03 crc kubenswrapper[4665]: E1205 01:32:03.940780 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 05 01:32:03 crc kubenswrapper[4665]: E1205 01:32:03.941423 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n5b4h65dh59bhbbh5dfh67dh669h558hf9h7bh5d7h95hc9h68bh576h7ch6ch68bh9ch5f6h586h676h57h67ch579h584h675hfbh557h574h57fh588q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dsmfm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-777959986f-xdlws_openstack(2f28d1d6-99d2-4793-a5da-305accdaad6f): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:32:03 crc kubenswrapper[4665]: E1205 01:32:03.949836 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-777959986f-xdlws" 
podUID="2f28d1d6-99d2-4793-a5da-305accdaad6f" Dec 05 01:32:04 crc kubenswrapper[4665]: E1205 01:32:04.019625 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 05 01:32:04 crc kubenswrapper[4665]: E1205 01:32:04.019899 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n95h667h559h6bh675h97h7fh87h697h689h66dh5bdh67bh8ch9dh5ffh96hd9h5f4h5fdh646h5ffh5f6h75h686h67bh98h64dh5d6h94h85h5ffq,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-nzxrv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-647f7cc89f-n8zf8_openstack(ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:32:04 crc kubenswrapper[4665]: E1205 01:32:04.021844 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-647f7cc89f-n8zf8" podUID="ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b" Dec 05 01:32:04 crc kubenswrapper[4665]: I1205 01:32:04.671809 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-zpx6p" podUID="b63eb623-410f-4130-bf07-845b294c89f1" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.113:5353: i/o timeout" Dec 05 01:32:09 crc kubenswrapper[4665]: I1205 01:32:09.672286 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-zpx6p" 
podUID="b63eb623-410f-4130-bf07-845b294c89f1" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.113:5353: i/o timeout" Dec 05 01:32:09 crc kubenswrapper[4665]: I1205 01:32:09.673002 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-698758b865-zpx6p" Dec 05 01:32:14 crc kubenswrapper[4665]: I1205 01:32:14.673667 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-zpx6p" podUID="b63eb623-410f-4130-bf07-845b294c89f1" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.113:5353: i/o timeout" Dec 05 01:32:15 crc kubenswrapper[4665]: I1205 01:32:15.509352 4665 generic.go:334] "Generic (PLEG): container finished" podID="a032ea63-dc16-4378-b848-9a4f1274f860" containerID="622e5e533523abb21b92b9f2a80cae4cce61914fe581467d07841e59ba2b7bc7" exitCode=0 Dec 05 01:32:15 crc kubenswrapper[4665]: I1205 01:32:15.509393 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-v6sj2" event={"ID":"a032ea63-dc16-4378-b848-9a4f1274f860","Type":"ContainerDied","Data":"622e5e533523abb21b92b9f2a80cae4cce61914fe581467d07841e59ba2b7bc7"} Dec 05 01:32:17 crc kubenswrapper[4665]: E1205 01:32:17.892553 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 05 01:32:17 crc kubenswrapper[4665]: E1205 01:32:17.893219 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F /var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ncch649h5f5hc6hcbh57bh99h57bh5d9h589h55fh56fh8dh579h557h695h5d9h5bfh589h79h56fh7h65fh5f6h55fh5b6h59fh67ch55dhc7h546h85q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5jtc2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
horizon-6449c7cf9c-cwrn8_openstack(5ec4949b-8f9e-4c8f-8070-546ec52b3213): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:32:17 crc kubenswrapper[4665]: E1205 01:32:17.896810 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-6449c7cf9c-cwrn8" podUID="5ec4949b-8f9e-4c8f-8070-546ec52b3213" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.042753 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-zpx6p" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.049685 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-647f7cc89f-n8zf8" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.076281 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-777959986f-xdlws" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.174255 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b63eb623-410f-4130-bf07-845b294c89f1-ovsdbserver-nb\") pod \"b63eb623-410f-4130-bf07-845b294c89f1\" (UID: \"b63eb623-410f-4130-bf07-845b294c89f1\") " Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.174362 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b63eb623-410f-4130-bf07-845b294c89f1-ovsdbserver-sb\") pod \"b63eb623-410f-4130-bf07-845b294c89f1\" (UID: \"b63eb623-410f-4130-bf07-845b294c89f1\") " Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.174400 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzxrv\" (UniqueName: \"kubernetes.io/projected/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-kube-api-access-nzxrv\") pod \"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b\" (UID: \"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b\") " Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.174453 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-logs\") pod \"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b\" (UID: \"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b\") " Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.174476 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-config-data\") pod \"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b\" (UID: \"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b\") " Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.174497 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k7mw9\" (UniqueName: \"kubernetes.io/projected/b63eb623-410f-4130-bf07-845b294c89f1-kube-api-access-k7mw9\") pod \"b63eb623-410f-4130-bf07-845b294c89f1\" (UID: \"b63eb623-410f-4130-bf07-845b294c89f1\") " Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.174582 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-horizon-secret-key\") pod \"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b\" (UID: \"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b\") " Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.174638 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-scripts\") pod \"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b\" (UID: \"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b\") " Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.174660 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b63eb623-410f-4130-bf07-845b294c89f1-dns-svc\") pod \"b63eb623-410f-4130-bf07-845b294c89f1\" (UID: \"b63eb623-410f-4130-bf07-845b294c89f1\") " Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.174777 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b63eb623-410f-4130-bf07-845b294c89f1-config\") pod \"b63eb623-410f-4130-bf07-845b294c89f1\" (UID: \"b63eb623-410f-4130-bf07-845b294c89f1\") " Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.174860 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f28d1d6-99d2-4793-a5da-305accdaad6f-logs\") pod \"2f28d1d6-99d2-4793-a5da-305accdaad6f\" (UID: \"2f28d1d6-99d2-4793-a5da-305accdaad6f\") " Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.175324 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-scripts" (OuterVolumeSpecName: "scripts") pod "ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b" (UID: "ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.175663 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2f28d1d6-99d2-4793-a5da-305accdaad6f-logs" (OuterVolumeSpecName: "logs") pod "2f28d1d6-99d2-4793-a5da-305accdaad6f" (UID: "2f28d1d6-99d2-4793-a5da-305accdaad6f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.175693 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-logs" (OuterVolumeSpecName: "logs") pod "ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b" (UID: "ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.176192 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-config-data" (OuterVolumeSpecName: "config-data") pod "ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b" (UID: "ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.179369 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b" (UID: "ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.179713 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b63eb623-410f-4130-bf07-845b294c89f1-kube-api-access-k7mw9" (OuterVolumeSpecName: "kube-api-access-k7mw9") pod "b63eb623-410f-4130-bf07-845b294c89f1" (UID: "b63eb623-410f-4130-bf07-845b294c89f1"). InnerVolumeSpecName "kube-api-access-k7mw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.191526 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-kube-api-access-nzxrv" (OuterVolumeSpecName: "kube-api-access-nzxrv") pod "ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b" (UID: "ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b"). InnerVolumeSpecName "kube-api-access-nzxrv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.220664 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b63eb623-410f-4130-bf07-845b294c89f1-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b63eb623-410f-4130-bf07-845b294c89f1" (UID: "b63eb623-410f-4130-bf07-845b294c89f1"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.226469 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b63eb623-410f-4130-bf07-845b294c89f1-config" (OuterVolumeSpecName: "config") pod "b63eb623-410f-4130-bf07-845b294c89f1" (UID: "b63eb623-410f-4130-bf07-845b294c89f1"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.230958 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b63eb623-410f-4130-bf07-845b294c89f1-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b63eb623-410f-4130-bf07-845b294c89f1" (UID: "b63eb623-410f-4130-bf07-845b294c89f1"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.252919 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b63eb623-410f-4130-bf07-845b294c89f1-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b63eb623-410f-4130-bf07-845b294c89f1" (UID: "b63eb623-410f-4130-bf07-845b294c89f1"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.276316 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dsmfm\" (UniqueName: \"kubernetes.io/projected/2f28d1d6-99d2-4793-a5da-305accdaad6f-kube-api-access-dsmfm\") pod \"2f28d1d6-99d2-4793-a5da-305accdaad6f\" (UID: \"2f28d1d6-99d2-4793-a5da-305accdaad6f\") " Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.276371 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2f28d1d6-99d2-4793-a5da-305accdaad6f-config-data\") pod \"2f28d1d6-99d2-4793-a5da-305accdaad6f\" (UID: \"2f28d1d6-99d2-4793-a5da-305accdaad6f\") " Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.276568 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2f28d1d6-99d2-4793-a5da-305accdaad6f-scripts\") pod \"2f28d1d6-99d2-4793-a5da-305accdaad6f\" (UID: \"2f28d1d6-99d2-4793-a5da-305accdaad6f\") " Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.276636 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2f28d1d6-99d2-4793-a5da-305accdaad6f-horizon-secret-key\") pod \"2f28d1d6-99d2-4793-a5da-305accdaad6f\" (UID: \"2f28d1d6-99d2-4793-a5da-305accdaad6f\") " Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.276997 4665 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b63eb623-410f-4130-bf07-845b294c89f1-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.277012 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzxrv\" (UniqueName: \"kubernetes.io/projected/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-kube-api-access-nzxrv\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.277026 4665 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-logs\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.277035 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.277044 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k7mw9\" (UniqueName: \"kubernetes.io/projected/b63eb623-410f-4130-bf07-845b294c89f1-kube-api-access-k7mw9\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.277052 4665 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.277061 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.277133 4665 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/b63eb623-410f-4130-bf07-845b294c89f1-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.277143 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b63eb623-410f-4130-bf07-845b294c89f1-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.277152 4665 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2f28d1d6-99d2-4793-a5da-305accdaad6f-logs\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.277161 4665 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b63eb623-410f-4130-bf07-845b294c89f1-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.277603 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f28d1d6-99d2-4793-a5da-305accdaad6f-config-data" (OuterVolumeSpecName: "config-data") pod "2f28d1d6-99d2-4793-a5da-305accdaad6f" (UID: "2f28d1d6-99d2-4793-a5da-305accdaad6f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.278340 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f28d1d6-99d2-4793-a5da-305accdaad6f-scripts" (OuterVolumeSpecName: "scripts") pod "2f28d1d6-99d2-4793-a5da-305accdaad6f" (UID: "2f28d1d6-99d2-4793-a5da-305accdaad6f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.280827 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f28d1d6-99d2-4793-a5da-305accdaad6f-kube-api-access-dsmfm" (OuterVolumeSpecName: "kube-api-access-dsmfm") pod "2f28d1d6-99d2-4793-a5da-305accdaad6f" (UID: "2f28d1d6-99d2-4793-a5da-305accdaad6f"). InnerVolumeSpecName "kube-api-access-dsmfm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.281908 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2f28d1d6-99d2-4793-a5da-305accdaad6f-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "2f28d1d6-99d2-4793-a5da-305accdaad6f" (UID: "2f28d1d6-99d2-4793-a5da-305accdaad6f"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.378609 4665 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2f28d1d6-99d2-4793-a5da-305accdaad6f-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.378642 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dsmfm\" (UniqueName: \"kubernetes.io/projected/2f28d1d6-99d2-4793-a5da-305accdaad6f-kube-api-access-dsmfm\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.378656 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2f28d1d6-99d2-4793-a5da-305accdaad6f-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.378668 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2f28d1d6-99d2-4793-a5da-305accdaad6f-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.540344 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-647f7cc89f-n8zf8" event={"ID":"ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b","Type":"ContainerDied","Data":"a9027ac21efbd53a4eb4da0f54dd7ae8ffce154de1b91d15a7452c5a0e0d77d2"} Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.540364 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-647f7cc89f-n8zf8" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.543823 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-698758b865-zpx6p" event={"ID":"b63eb623-410f-4130-bf07-845b294c89f1","Type":"ContainerDied","Data":"03332b114e0c63e745eff2bb3fa4cb9d0a33a096193e69729a43f0dae8275372"} Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.543930 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-698758b865-zpx6p" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.545921 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-777959986f-xdlws" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.545963 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-777959986f-xdlws" event={"ID":"2f28d1d6-99d2-4793-a5da-305accdaad6f","Type":"ContainerDied","Data":"fd29b27315f2efa1c501af155b7c96bab37801793801f99355497ff5524988db"} Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.597259 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-698758b865-zpx6p"] Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.615795 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-698758b865-zpx6p"] Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.649421 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-777959986f-xdlws"] Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.661672 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-777959986f-xdlws"] Dec 05 01:32:18 crc kubenswrapper[4665]: E1205 01:32:18.666432 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Dec 05 01:32:18 crc kubenswrapper[4665]: E1205 01:32:18.666578 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vnwsx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-267l7_openstack(72b6fbf7-1bb1-45c3-97a1-61da90bd1a92): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:32:18 crc kubenswrapper[4665]: E1205 01:32:18.668769 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" 
pod="openstack/barbican-db-sync-267l7" podUID="72b6fbf7-1bb1-45c3-97a1-61da90bd1a92" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.681393 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-647f7cc89f-n8zf8"] Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.687669 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-647f7cc89f-n8zf8"] Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.688081 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-v6sj2" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.788368 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2x2lm\" (UniqueName: \"kubernetes.io/projected/a032ea63-dc16-4378-b848-9a4f1274f860-kube-api-access-2x2lm\") pod \"a032ea63-dc16-4378-b848-9a4f1274f860\" (UID: \"a032ea63-dc16-4378-b848-9a4f1274f860\") " Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.788414 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/a032ea63-dc16-4378-b848-9a4f1274f860-config\") pod \"a032ea63-dc16-4378-b848-9a4f1274f860\" (UID: \"a032ea63-dc16-4378-b848-9a4f1274f860\") " Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.800515 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a032ea63-dc16-4378-b848-9a4f1274f860-kube-api-access-2x2lm" (OuterVolumeSpecName: "kube-api-access-2x2lm") pod "a032ea63-dc16-4378-b848-9a4f1274f860" (UID: "a032ea63-dc16-4378-b848-9a4f1274f860"). InnerVolumeSpecName "kube-api-access-2x2lm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.823271 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a032ea63-dc16-4378-b848-9a4f1274f860-config" (OuterVolumeSpecName: "config") pod "a032ea63-dc16-4378-b848-9a4f1274f860" (UID: "a032ea63-dc16-4378-b848-9a4f1274f860"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.889355 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a032ea63-dc16-4378-b848-9a4f1274f860-combined-ca-bundle\") pod \"a032ea63-dc16-4378-b848-9a4f1274f860\" (UID: \"a032ea63-dc16-4378-b848-9a4f1274f860\") " Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.889836 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2x2lm\" (UniqueName: \"kubernetes.io/projected/a032ea63-dc16-4378-b848-9a4f1274f860-kube-api-access-2x2lm\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.889851 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/a032ea63-dc16-4378-b848-9a4f1274f860-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.903580 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f28d1d6-99d2-4793-a5da-305accdaad6f" path="/var/lib/kubelet/pods/2f28d1d6-99d2-4793-a5da-305accdaad6f/volumes" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.904027 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b" path="/var/lib/kubelet/pods/ae9b33be-826b-4f18-a7bf-b0ce9f52cd9b/volumes" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.904675 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b63eb623-410f-4130-bf07-845b294c89f1" path="/var/lib/kubelet/pods/b63eb623-410f-4130-bf07-845b294c89f1/volumes" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.916814 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a032ea63-dc16-4378-b848-9a4f1274f860-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a032ea63-dc16-4378-b848-9a4f1274f860" (UID: "a032ea63-dc16-4378-b848-9a4f1274f860"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:32:18 crc kubenswrapper[4665]: I1205 01:32:18.991395 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a032ea63-dc16-4378-b848-9a4f1274f860-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:19 crc kubenswrapper[4665]: I1205 01:32:19.557122 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-sync-v6sj2" Dec 05 01:32:19 crc kubenswrapper[4665]: I1205 01:32:19.557324 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-v6sj2" event={"ID":"a032ea63-dc16-4378-b848-9a4f1274f860","Type":"ContainerDied","Data":"0b82d339c396515b01062969fff95ea69fa6347a09ee4d84f605d6981b6caad7"} Dec 05 01:32:19 crc kubenswrapper[4665]: I1205 01:32:19.557478 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0b82d339c396515b01062969fff95ea69fa6347a09ee4d84f605d6981b6caad7" Dec 05 01:32:19 crc kubenswrapper[4665]: E1205 01:32:19.558324 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-267l7" podUID="72b6fbf7-1bb1-45c3-97a1-61da90bd1a92" Dec 05 01:32:19 crc kubenswrapper[4665]: I1205 01:32:19.674893 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-698758b865-zpx6p" podUID="b63eb623-410f-4130-bf07-845b294c89f1" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.113:5353: i/o timeout" Dec 05 01:32:19 crc kubenswrapper[4665]: I1205 01:32:19.959799 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-444px"] Dec 05 01:32:19 crc kubenswrapper[4665]: E1205 01:32:19.960594 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b63eb623-410f-4130-bf07-845b294c89f1" containerName="init" Dec 05 01:32:19 crc kubenswrapper[4665]: I1205 01:32:19.960614 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="b63eb623-410f-4130-bf07-845b294c89f1" containerName="init" Dec 05 01:32:19 crc kubenswrapper[4665]: E1205 01:32:19.960638 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a032ea63-dc16-4378-b848-9a4f1274f860" containerName="neutron-db-sync" Dec 05 01:32:19 crc kubenswrapper[4665]: I1205 01:32:19.960645 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="a032ea63-dc16-4378-b848-9a4f1274f860" containerName="neutron-db-sync" Dec 05 01:32:19 crc kubenswrapper[4665]: E1205 01:32:19.960667 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b63eb623-410f-4130-bf07-845b294c89f1" containerName="dnsmasq-dns" Dec 05 01:32:19 crc kubenswrapper[4665]: I1205 01:32:19.960674 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="b63eb623-410f-4130-bf07-845b294c89f1" containerName="dnsmasq-dns" Dec 05 01:32:19 crc kubenswrapper[4665]: I1205 01:32:19.961026 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="b63eb623-410f-4130-bf07-845b294c89f1" containerName="dnsmasq-dns" Dec 05 01:32:19 crc kubenswrapper[4665]: I1205 01:32:19.961076 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="a032ea63-dc16-4378-b848-9a4f1274f860" containerName="neutron-db-sync" Dec 05 01:32:19 crc kubenswrapper[4665]: I1205 01:32:19.963261 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-444px" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.013560 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-444px"] Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.125770 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-444px\" (UID: \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\") " pod="openstack/dnsmasq-dns-55f844cf75-444px" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.125911 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-config\") pod \"dnsmasq-dns-55f844cf75-444px\" (UID: \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\") " pod="openstack/dnsmasq-dns-55f844cf75-444px" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.125942 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnbzn\" (UniqueName: \"kubernetes.io/projected/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-kube-api-access-nnbzn\") pod \"dnsmasq-dns-55f844cf75-444px\" (UID: \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\") " pod="openstack/dnsmasq-dns-55f844cf75-444px" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.125999 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-dns-svc\") pod \"dnsmasq-dns-55f844cf75-444px\" (UID: \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\") " pod="openstack/dnsmasq-dns-55f844cf75-444px" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.126029 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-444px\" (UID: \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\") " pod="openstack/dnsmasq-dns-55f844cf75-444px" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.126069 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-444px\" (UID: \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\") " pod="openstack/dnsmasq-dns-55f844cf75-444px" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.227174 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-444px\" (UID: \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\") " pod="openstack/dnsmasq-dns-55f844cf75-444px" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.227289 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-config\") pod \"dnsmasq-dns-55f844cf75-444px\" (UID: \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\") " pod="openstack/dnsmasq-dns-55f844cf75-444px" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.227342 4665 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-nnbzn\" (UniqueName: \"kubernetes.io/projected/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-kube-api-access-nnbzn\") pod \"dnsmasq-dns-55f844cf75-444px\" (UID: \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\") " pod="openstack/dnsmasq-dns-55f844cf75-444px" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.227398 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-dns-svc\") pod \"dnsmasq-dns-55f844cf75-444px\" (UID: \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\") " pod="openstack/dnsmasq-dns-55f844cf75-444px" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.227428 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-444px\" (UID: \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\") " pod="openstack/dnsmasq-dns-55f844cf75-444px" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.227462 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-444px\" (UID: \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\") " pod="openstack/dnsmasq-dns-55f844cf75-444px" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.228416 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-dns-swift-storage-0\") pod \"dnsmasq-dns-55f844cf75-444px\" (UID: \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\") " pod="openstack/dnsmasq-dns-55f844cf75-444px" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.229108 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-ovsdbserver-sb\") pod \"dnsmasq-dns-55f844cf75-444px\" (UID: \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\") " pod="openstack/dnsmasq-dns-55f844cf75-444px" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.232769 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-config\") pod \"dnsmasq-dns-55f844cf75-444px\" (UID: \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\") " pod="openstack/dnsmasq-dns-55f844cf75-444px" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.234928 4665 scope.go:117] "RemoveContainer" containerID="ea36421967e1492d011510de0c1b7f2344fada03a02a56af90c650df129ae784" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.234958 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-dns-svc\") pod \"dnsmasq-dns-55f844cf75-444px\" (UID: \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\") " pod="openstack/dnsmasq-dns-55f844cf75-444px" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.241395 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-ovsdbserver-nb\") pod \"dnsmasq-dns-55f844cf75-444px\" (UID: \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\") " pod="openstack/dnsmasq-dns-55f844cf75-444px" Dec 05 01:32:20 
crc kubenswrapper[4665]: I1205 01:32:20.264230 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nnbzn\" (UniqueName: \"kubernetes.io/projected/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-kube-api-access-nnbzn\") pod \"dnsmasq-dns-55f844cf75-444px\" (UID: \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\") " pod="openstack/dnsmasq-dns-55f844cf75-444px" Dec 05 01:32:20 crc kubenswrapper[4665]: E1205 01:32:20.315735 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Dec 05 01:32:20 crc kubenswrapper[4665]: E1205 01:32:20.315901 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-dm6fs,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-rkvs6_openstack(f2a58335-982b-42ff-933c-f93d38fbb197): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.316161 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-7c5886d9b4-c8rm2"] Dec 05 01:32:20 crc kubenswrapper[4665]: E1205 
01:32:20.317195 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-rkvs6" podUID="f2a58335-982b-42ff-933c-f93d38fbb197" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.317610 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-7c5886d9b4-c8rm2" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.334806 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.335088 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.336524 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-h4dhb" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.336714 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.358268 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7c5886d9b4-c8rm2"] Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.386887 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-444px" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.445888 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f3c1e917-8b40-470c-88b8-5fa1a9c37665-httpd-config\") pod \"neutron-7c5886d9b4-c8rm2\" (UID: \"f3c1e917-8b40-470c-88b8-5fa1a9c37665\") " pod="openstack/neutron-7c5886d9b4-c8rm2" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.446118 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f3c1e917-8b40-470c-88b8-5fa1a9c37665-config\") pod \"neutron-7c5886d9b4-c8rm2\" (UID: \"f3c1e917-8b40-470c-88b8-5fa1a9c37665\") " pod="openstack/neutron-7c5886d9b4-c8rm2" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.446155 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5c85\" (UniqueName: \"kubernetes.io/projected/f3c1e917-8b40-470c-88b8-5fa1a9c37665-kube-api-access-k5c85\") pod \"neutron-7c5886d9b4-c8rm2\" (UID: \"f3c1e917-8b40-470c-88b8-5fa1a9c37665\") " pod="openstack/neutron-7c5886d9b4-c8rm2" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.446180 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f3c1e917-8b40-470c-88b8-5fa1a9c37665-ovndb-tls-certs\") pod \"neutron-7c5886d9b4-c8rm2\" (UID: \"f3c1e917-8b40-470c-88b8-5fa1a9c37665\") " pod="openstack/neutron-7c5886d9b4-c8rm2" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.446215 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3c1e917-8b40-470c-88b8-5fa1a9c37665-combined-ca-bundle\") pod \"neutron-7c5886d9b4-c8rm2\" (UID: \"f3c1e917-8b40-470c-88b8-5fa1a9c37665\") " pod="openstack/neutron-7c5886d9b4-c8rm2" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.504477 4665 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6449c7cf9c-cwrn8" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.534901 4665 scope.go:117] "RemoveContainer" containerID="724bd78baf4d982d3d09ab22b8646f289386612dd41914ff591e5e2aec6c09fb" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.551740 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f3c1e917-8b40-470c-88b8-5fa1a9c37665-config\") pod \"neutron-7c5886d9b4-c8rm2\" (UID: \"f3c1e917-8b40-470c-88b8-5fa1a9c37665\") " pod="openstack/neutron-7c5886d9b4-c8rm2" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.551797 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5c85\" (UniqueName: \"kubernetes.io/projected/f3c1e917-8b40-470c-88b8-5fa1a9c37665-kube-api-access-k5c85\") pod \"neutron-7c5886d9b4-c8rm2\" (UID: \"f3c1e917-8b40-470c-88b8-5fa1a9c37665\") " pod="openstack/neutron-7c5886d9b4-c8rm2" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.551823 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f3c1e917-8b40-470c-88b8-5fa1a9c37665-ovndb-tls-certs\") pod \"neutron-7c5886d9b4-c8rm2\" (UID: \"f3c1e917-8b40-470c-88b8-5fa1a9c37665\") " pod="openstack/neutron-7c5886d9b4-c8rm2" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.551877 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3c1e917-8b40-470c-88b8-5fa1a9c37665-combined-ca-bundle\") pod \"neutron-7c5886d9b4-c8rm2\" (UID: \"f3c1e917-8b40-470c-88b8-5fa1a9c37665\") " pod="openstack/neutron-7c5886d9b4-c8rm2" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.551933 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f3c1e917-8b40-470c-88b8-5fa1a9c37665-httpd-config\") pod \"neutron-7c5886d9b4-c8rm2\" (UID: \"f3c1e917-8b40-470c-88b8-5fa1a9c37665\") " pod="openstack/neutron-7c5886d9b4-c8rm2" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.568417 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f3c1e917-8b40-470c-88b8-5fa1a9c37665-httpd-config\") pod \"neutron-7c5886d9b4-c8rm2\" (UID: \"f3c1e917-8b40-470c-88b8-5fa1a9c37665\") " pod="openstack/neutron-7c5886d9b4-c8rm2" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.569857 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f3c1e917-8b40-470c-88b8-5fa1a9c37665-ovndb-tls-certs\") pod \"neutron-7c5886d9b4-c8rm2\" (UID: \"f3c1e917-8b40-470c-88b8-5fa1a9c37665\") " pod="openstack/neutron-7c5886d9b4-c8rm2" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.574627 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/f3c1e917-8b40-470c-88b8-5fa1a9c37665-config\") pod \"neutron-7c5886d9b4-c8rm2\" (UID: \"f3c1e917-8b40-470c-88b8-5fa1a9c37665\") " pod="openstack/neutron-7c5886d9b4-c8rm2" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.575331 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3c1e917-8b40-470c-88b8-5fa1a9c37665-combined-ca-bundle\") pod 
\"neutron-7c5886d9b4-c8rm2\" (UID: \"f3c1e917-8b40-470c-88b8-5fa1a9c37665\") " pod="openstack/neutron-7c5886d9b4-c8rm2" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.597136 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5c85\" (UniqueName: \"kubernetes.io/projected/f3c1e917-8b40-470c-88b8-5fa1a9c37665-kube-api-access-k5c85\") pod \"neutron-7c5886d9b4-c8rm2\" (UID: \"f3c1e917-8b40-470c-88b8-5fa1a9c37665\") " pod="openstack/neutron-7c5886d9b4-c8rm2" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.614317 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-6449c7cf9c-cwrn8" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.614257 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-6449c7cf9c-cwrn8" event={"ID":"5ec4949b-8f9e-4c8f-8070-546ec52b3213","Type":"ContainerDied","Data":"75f37b24d2c1d569336bf552eb2e2718efe34ec6cd88446f0b385913f7847809"} Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.631993 4665 scope.go:117] "RemoveContainer" containerID="afbf4edaf46191dc5f10d5be8b1236caf66fc4379cc71d443a41de7f0f6ecdcd" Dec 05 01:32:20 crc kubenswrapper[4665]: E1205 01:32:20.650423 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-rkvs6" podUID="f2a58335-982b-42ff-933c-f93d38fbb197" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.653472 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ec4949b-8f9e-4c8f-8070-546ec52b3213-logs\") pod \"5ec4949b-8f9e-4c8f-8070-546ec52b3213\" (UID: \"5ec4949b-8f9e-4c8f-8070-546ec52b3213\") " Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.653600 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5ec4949b-8f9e-4c8f-8070-546ec52b3213-config-data\") pod \"5ec4949b-8f9e-4c8f-8070-546ec52b3213\" (UID: \"5ec4949b-8f9e-4c8f-8070-546ec52b3213\") " Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.653960 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5ec4949b-8f9e-4c8f-8070-546ec52b3213-scripts\") pod \"5ec4949b-8f9e-4c8f-8070-546ec52b3213\" (UID: \"5ec4949b-8f9e-4c8f-8070-546ec52b3213\") " Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.654048 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5ec4949b-8f9e-4c8f-8070-546ec52b3213-horizon-secret-key\") pod \"5ec4949b-8f9e-4c8f-8070-546ec52b3213\" (UID: \"5ec4949b-8f9e-4c8f-8070-546ec52b3213\") " Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.654070 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jtc2\" (UniqueName: \"kubernetes.io/projected/5ec4949b-8f9e-4c8f-8070-546ec52b3213-kube-api-access-5jtc2\") pod \"5ec4949b-8f9e-4c8f-8070-546ec52b3213\" (UID: \"5ec4949b-8f9e-4c8f-8070-546ec52b3213\") " Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.657100 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ec4949b-8f9e-4c8f-8070-546ec52b3213-scripts" (OuterVolumeSpecName: 
"scripts") pod "5ec4949b-8f9e-4c8f-8070-546ec52b3213" (UID: "5ec4949b-8f9e-4c8f-8070-546ec52b3213"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.657458 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5ec4949b-8f9e-4c8f-8070-546ec52b3213-logs" (OuterVolumeSpecName: "logs") pod "5ec4949b-8f9e-4c8f-8070-546ec52b3213" (UID: "5ec4949b-8f9e-4c8f-8070-546ec52b3213"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.657570 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5ec4949b-8f9e-4c8f-8070-546ec52b3213-config-data" (OuterVolumeSpecName: "config-data") pod "5ec4949b-8f9e-4c8f-8070-546ec52b3213" (UID: "5ec4949b-8f9e-4c8f-8070-546ec52b3213"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.659830 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ec4949b-8f9e-4c8f-8070-546ec52b3213-kube-api-access-5jtc2" (OuterVolumeSpecName: "kube-api-access-5jtc2") pod "5ec4949b-8f9e-4c8f-8070-546ec52b3213" (UID: "5ec4949b-8f9e-4c8f-8070-546ec52b3213"). InnerVolumeSpecName "kube-api-access-5jtc2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.695788 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ec4949b-8f9e-4c8f-8070-546ec52b3213-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "5ec4949b-8f9e-4c8f-8070-546ec52b3213" (UID: "5ec4949b-8f9e-4c8f-8070-546ec52b3213"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.776542 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/5ec4949b-8f9e-4c8f-8070-546ec52b3213-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.776571 4665 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/5ec4949b-8f9e-4c8f-8070-546ec52b3213-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.776585 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jtc2\" (UniqueName: \"kubernetes.io/projected/5ec4949b-8f9e-4c8f-8070-546ec52b3213-kube-api-access-5jtc2\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.776596 4665 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5ec4949b-8f9e-4c8f-8070-546ec52b3213-logs\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.776606 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/5ec4949b-8f9e-4c8f-8070-546ec52b3213-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.847624 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7c5886d9b4-c8rm2" Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.933314 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 01:32:20 crc kubenswrapper[4665]: I1205 01:32:20.981839 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-6449c7cf9c-cwrn8"] Dec 05 01:32:21 crc kubenswrapper[4665]: I1205 01:32:21.001925 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-6449c7cf9c-cwrn8"] Dec 05 01:32:21 crc kubenswrapper[4665]: I1205 01:32:21.147032 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-644f785f4-mslbg"] Dec 05 01:32:21 crc kubenswrapper[4665]: I1205 01:32:21.170032 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-86cd4c9876-glfvx"] Dec 05 01:32:21 crc kubenswrapper[4665]: I1205 01:32:21.286363 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-xkf6f"] Dec 05 01:32:21 crc kubenswrapper[4665]: I1205 01:32:21.310580 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 01:32:21 crc kubenswrapper[4665]: I1205 01:32:21.486906 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-444px"] Dec 05 01:32:21 crc kubenswrapper[4665]: W1205 01:32:21.495466 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb14532d1_bdd2_4576_8d6a_b94c7d3fc137.slice/crio-66e1e7245a47e4bb63fab24717a3fd5977e20a8fafc6026ed4ccd1a93dc95689 WatchSource:0}: Error finding container 66e1e7245a47e4bb63fab24717a3fd5977e20a8fafc6026ed4ccd1a93dc95689: Status 404 returned error can't find the container with id 66e1e7245a47e4bb63fab24717a3fd5977e20a8fafc6026ed4ccd1a93dc95689 Dec 05 01:32:21 crc kubenswrapper[4665]: I1205 01:32:21.684228 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b1a1511a-dace-4cdd-85f9-39a189eacf7c","Type":"ContainerStarted","Data":"260feaf01578bf825dc2dae8b3b9fa886fc3e2193ed803e1bad1a9847fc62378"} Dec 05 01:32:21 crc kubenswrapper[4665]: I1205 01:32:21.687856 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b995216-a8c4-418c-9a82-eff79ca5360c","Type":"ContainerStarted","Data":"4262c2a6d6acc16e3edda86aafa5487037d44b0b9374b4972573af34a7f5279b"} Dec 05 01:32:21 crc kubenswrapper[4665]: I1205 01:32:21.690819 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-644f785f4-mslbg" event={"ID":"64407a72-3fdf-450f-b5c0-913ee74bb437","Type":"ContainerStarted","Data":"a9d7dbbd1936634129883807bcd997247ee1f00044e85c92e41e38da75ca23df"} Dec 05 01:32:21 crc kubenswrapper[4665]: I1205 01:32:21.690870 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-7c5886d9b4-c8rm2"] Dec 05 01:32:21 crc kubenswrapper[4665]: I1205 01:32:21.692330 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-444px" event={"ID":"b14532d1-bdd2-4576-8d6a-b94c7d3fc137","Type":"ContainerStarted","Data":"66e1e7245a47e4bb63fab24717a3fd5977e20a8fafc6026ed4ccd1a93dc95689"} Dec 05 01:32:21 crc kubenswrapper[4665]: I1205 01:32:21.695620 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-zhgm4" 
event={"ID":"de47bb8c-ea83-4f9a-be28-5716b59d25ed","Type":"ContainerStarted","Data":"082c94ad7f4d5cff3d790e9fecb6cc2e99265db8f114c713f8ebc5d992827ee8"} Dec 05 01:32:21 crc kubenswrapper[4665]: I1205 01:32:21.701545 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-xkf6f" event={"ID":"329fcb4a-b83f-4831-989a-584868907b9c","Type":"ContainerStarted","Data":"e50e77e5d3507d5185c3652341c8aa63f89eb40a660322833cb28f5ee4b2993e"} Dec 05 01:32:21 crc kubenswrapper[4665]: I1205 01:32:21.701592 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-xkf6f" event={"ID":"329fcb4a-b83f-4831-989a-584868907b9c","Type":"ContainerStarted","Data":"eb58908f61ff797cdca513d613e1316466699d00b9e90ae43257d9793681a576"} Dec 05 01:32:21 crc kubenswrapper[4665]: I1205 01:32:21.713339 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-86cd4c9876-glfvx" event={"ID":"2178a916-adc5-4ff5-8972-30b105320f5f","Type":"ContainerStarted","Data":"444032a90bc43a2efb352707374c096c88a6558c631b3ca70aff18e390775da3"} Dec 05 01:32:21 crc kubenswrapper[4665]: I1205 01:32:21.727056 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f7b0e53d-dba2-440d-844e-dd4ca34f1895","Type":"ContainerStarted","Data":"a8d923f5dfdfd280fafb5de790ad914697f31e701d8e026628b045b2e57021fe"} Dec 05 01:32:21 crc kubenswrapper[4665]: I1205 01:32:21.729619 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-zhgm4" podStartSLOduration=6.740284506 podStartE2EDuration="43.729596955s" podCreationTimestamp="2025-12-05 01:31:38 +0000 UTC" firstStartedPulling="2025-12-05 01:31:40.92358825 +0000 UTC m=+1276.262980549" lastFinishedPulling="2025-12-05 01:32:17.912900699 +0000 UTC m=+1313.252292998" observedRunningTime="2025-12-05 01:32:21.708588331 +0000 UTC m=+1317.047980650" watchObservedRunningTime="2025-12-05 01:32:21.729596955 +0000 UTC m=+1317.068989254" Dec 05 01:32:21 crc kubenswrapper[4665]: I1205 01:32:21.740177 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-xkf6f" podStartSLOduration=23.740156978 podStartE2EDuration="23.740156978s" podCreationTimestamp="2025-12-05 01:31:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:32:21.739902432 +0000 UTC m=+1317.079294741" watchObservedRunningTime="2025-12-05 01:32:21.740156978 +0000 UTC m=+1317.079549277" Dec 05 01:32:22 crc kubenswrapper[4665]: I1205 01:32:22.739277 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-644f785f4-mslbg" event={"ID":"64407a72-3fdf-450f-b5c0-913ee74bb437","Type":"ContainerStarted","Data":"192c952cb000ba7e71ef83ad8a838fa9051aae01aa3845d94f45a9ee3b6821ac"} Dec 05 01:32:22 crc kubenswrapper[4665]: I1205 01:32:22.743176 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c5886d9b4-c8rm2" event={"ID":"f3c1e917-8b40-470c-88b8-5fa1a9c37665","Type":"ContainerStarted","Data":"be4047119aa4bef93cd3090e7d99350f73d834475fa53d2a7b7504c6fb2866dd"} Dec 05 01:32:22 crc kubenswrapper[4665]: I1205 01:32:22.743214 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c5886d9b4-c8rm2" event={"ID":"f3c1e917-8b40-470c-88b8-5fa1a9c37665","Type":"ContainerStarted","Data":"82e5049ccfae1ab5128c022bbe2e3ac51264511bcf0deadac50003bc1a6870d3"} Dec 05 01:32:22 crc 
kubenswrapper[4665]: I1205 01:32:22.746219 4665 generic.go:334] "Generic (PLEG): container finished" podID="b14532d1-bdd2-4576-8d6a-b94c7d3fc137" containerID="8258ff95ffefd31863b1177430013f69d3c657acedb8f275d783bcd27f324e88" exitCode=0 Dec 05 01:32:22 crc kubenswrapper[4665]: I1205 01:32:22.746355 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-444px" event={"ID":"b14532d1-bdd2-4576-8d6a-b94c7d3fc137","Type":"ContainerDied","Data":"8258ff95ffefd31863b1177430013f69d3c657acedb8f275d783bcd27f324e88"} Dec 05 01:32:22 crc kubenswrapper[4665]: I1205 01:32:22.750873 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-86cd4c9876-glfvx" event={"ID":"2178a916-adc5-4ff5-8972-30b105320f5f","Type":"ContainerStarted","Data":"0668f57d4a1180cd02471099a7d09021483214f428c5cf14964b853a82613c31"} Dec 05 01:32:22 crc kubenswrapper[4665]: I1205 01:32:22.753559 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f7b0e53d-dba2-440d-844e-dd4ca34f1895","Type":"ContainerStarted","Data":"9fc8786212208b83cbcc0907c0b995c9bc5120c0f00dbf8af527933b72c60446"} Dec 05 01:32:22 crc kubenswrapper[4665]: I1205 01:32:22.754960 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b1a1511a-dace-4cdd-85f9-39a189eacf7c","Type":"ContainerStarted","Data":"5c83a0ba54c84a14601b273a514ae7b3c9c644d5c392af546032fb2c3a6bd6b7"} Dec 05 01:32:22 crc kubenswrapper[4665]: I1205 01:32:22.938581 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ec4949b-8f9e-4c8f-8070-546ec52b3213" path="/var/lib/kubelet/pods/5ec4949b-8f9e-4c8f-8070-546ec52b3213/volumes" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.365158 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-5b484f7985-8qkjq"] Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.366875 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5b484f7985-8qkjq" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.380102 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.380340 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.456945 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5b484f7985-8qkjq"] Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.459627 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6a6180f-d384-4015-8bf3-6563123c2f6a-combined-ca-bundle\") pod \"neutron-5b484f7985-8qkjq\" (UID: \"c6a6180f-d384-4015-8bf3-6563123c2f6a\") " pod="openstack/neutron-5b484f7985-8qkjq" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.459667 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6a6180f-d384-4015-8bf3-6563123c2f6a-internal-tls-certs\") pod \"neutron-5b484f7985-8qkjq\" (UID: \"c6a6180f-d384-4015-8bf3-6563123c2f6a\") " pod="openstack/neutron-5b484f7985-8qkjq" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.459771 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cxdg5\" (UniqueName: \"kubernetes.io/projected/c6a6180f-d384-4015-8bf3-6563123c2f6a-kube-api-access-cxdg5\") pod \"neutron-5b484f7985-8qkjq\" (UID: \"c6a6180f-d384-4015-8bf3-6563123c2f6a\") " pod="openstack/neutron-5b484f7985-8qkjq" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.459813 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6a6180f-d384-4015-8bf3-6563123c2f6a-public-tls-certs\") pod \"neutron-5b484f7985-8qkjq\" (UID: \"c6a6180f-d384-4015-8bf3-6563123c2f6a\") " pod="openstack/neutron-5b484f7985-8qkjq" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.459843 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c6a6180f-d384-4015-8bf3-6563123c2f6a-httpd-config\") pod \"neutron-5b484f7985-8qkjq\" (UID: \"c6a6180f-d384-4015-8bf3-6563123c2f6a\") " pod="openstack/neutron-5b484f7985-8qkjq" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.459868 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c6a6180f-d384-4015-8bf3-6563123c2f6a-config\") pod \"neutron-5b484f7985-8qkjq\" (UID: \"c6a6180f-d384-4015-8bf3-6563123c2f6a\") " pod="openstack/neutron-5b484f7985-8qkjq" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.459890 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6a6180f-d384-4015-8bf3-6563123c2f6a-ovndb-tls-certs\") pod \"neutron-5b484f7985-8qkjq\" (UID: \"c6a6180f-d384-4015-8bf3-6563123c2f6a\") " pod="openstack/neutron-5b484f7985-8qkjq" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.566603 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cxdg5\" (UniqueName: 
\"kubernetes.io/projected/c6a6180f-d384-4015-8bf3-6563123c2f6a-kube-api-access-cxdg5\") pod \"neutron-5b484f7985-8qkjq\" (UID: \"c6a6180f-d384-4015-8bf3-6563123c2f6a\") " pod="openstack/neutron-5b484f7985-8qkjq" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.566655 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6a6180f-d384-4015-8bf3-6563123c2f6a-public-tls-certs\") pod \"neutron-5b484f7985-8qkjq\" (UID: \"c6a6180f-d384-4015-8bf3-6563123c2f6a\") " pod="openstack/neutron-5b484f7985-8qkjq" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.566982 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c6a6180f-d384-4015-8bf3-6563123c2f6a-httpd-config\") pod \"neutron-5b484f7985-8qkjq\" (UID: \"c6a6180f-d384-4015-8bf3-6563123c2f6a\") " pod="openstack/neutron-5b484f7985-8qkjq" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.567468 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/c6a6180f-d384-4015-8bf3-6563123c2f6a-config\") pod \"neutron-5b484f7985-8qkjq\" (UID: \"c6a6180f-d384-4015-8bf3-6563123c2f6a\") " pod="openstack/neutron-5b484f7985-8qkjq" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.567500 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6a6180f-d384-4015-8bf3-6563123c2f6a-ovndb-tls-certs\") pod \"neutron-5b484f7985-8qkjq\" (UID: \"c6a6180f-d384-4015-8bf3-6563123c2f6a\") " pod="openstack/neutron-5b484f7985-8qkjq" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.567525 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6a6180f-d384-4015-8bf3-6563123c2f6a-combined-ca-bundle\") pod \"neutron-5b484f7985-8qkjq\" (UID: \"c6a6180f-d384-4015-8bf3-6563123c2f6a\") " pod="openstack/neutron-5b484f7985-8qkjq" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.567541 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6a6180f-d384-4015-8bf3-6563123c2f6a-internal-tls-certs\") pod \"neutron-5b484f7985-8qkjq\" (UID: \"c6a6180f-d384-4015-8bf3-6563123c2f6a\") " pod="openstack/neutron-5b484f7985-8qkjq" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.613356 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/c6a6180f-d384-4015-8bf3-6563123c2f6a-httpd-config\") pod \"neutron-5b484f7985-8qkjq\" (UID: \"c6a6180f-d384-4015-8bf3-6563123c2f6a\") " pod="openstack/neutron-5b484f7985-8qkjq" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.614010 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6a6180f-d384-4015-8bf3-6563123c2f6a-public-tls-certs\") pod \"neutron-5b484f7985-8qkjq\" (UID: \"c6a6180f-d384-4015-8bf3-6563123c2f6a\") " pod="openstack/neutron-5b484f7985-8qkjq" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.621001 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6a6180f-d384-4015-8bf3-6563123c2f6a-ovndb-tls-certs\") pod \"neutron-5b484f7985-8qkjq\" (UID: \"c6a6180f-d384-4015-8bf3-6563123c2f6a\") " 
pod="openstack/neutron-5b484f7985-8qkjq" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.658718 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/c6a6180f-d384-4015-8bf3-6563123c2f6a-config\") pod \"neutron-5b484f7985-8qkjq\" (UID: \"c6a6180f-d384-4015-8bf3-6563123c2f6a\") " pod="openstack/neutron-5b484f7985-8qkjq" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.659265 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c6a6180f-d384-4015-8bf3-6563123c2f6a-combined-ca-bundle\") pod \"neutron-5b484f7985-8qkjq\" (UID: \"c6a6180f-d384-4015-8bf3-6563123c2f6a\") " pod="openstack/neutron-5b484f7985-8qkjq" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.659375 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c6a6180f-d384-4015-8bf3-6563123c2f6a-internal-tls-certs\") pod \"neutron-5b484f7985-8qkjq\" (UID: \"c6a6180f-d384-4015-8bf3-6563123c2f6a\") " pod="openstack/neutron-5b484f7985-8qkjq" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.659943 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cxdg5\" (UniqueName: \"kubernetes.io/projected/c6a6180f-d384-4015-8bf3-6563123c2f6a-kube-api-access-cxdg5\") pod \"neutron-5b484f7985-8qkjq\" (UID: \"c6a6180f-d384-4015-8bf3-6563123c2f6a\") " pod="openstack/neutron-5b484f7985-8qkjq" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.781954 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c5886d9b4-c8rm2" event={"ID":"f3c1e917-8b40-470c-88b8-5fa1a9c37665","Type":"ContainerStarted","Data":"bd05f6b48e43050e377d15a2fcb042f47a538daec1f0710625751ca71e3f29ef"} Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.782407 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-7c5886d9b4-c8rm2" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.817486 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-5b484f7985-8qkjq" Dec 05 01:32:23 crc kubenswrapper[4665]: I1205 01:32:23.822035 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-7c5886d9b4-c8rm2" podStartSLOduration=3.822016702 podStartE2EDuration="3.822016702s" podCreationTimestamp="2025-12-05 01:32:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:32:23.813753994 +0000 UTC m=+1319.153146293" watchObservedRunningTime="2025-12-05 01:32:23.822016702 +0000 UTC m=+1319.161409001" Dec 05 01:32:24 crc kubenswrapper[4665]: I1205 01:32:24.685457 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-5b484f7985-8qkjq"] Dec 05 01:32:24 crc kubenswrapper[4665]: I1205 01:32:24.794441 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5b484f7985-8qkjq" event={"ID":"c6a6180f-d384-4015-8bf3-6563123c2f6a","Type":"ContainerStarted","Data":"23c587e3bfc3f1a0ee4600dbab03d75da7c57ee30b390bc6f49910ea1cff709a"} Dec 05 01:32:24 crc kubenswrapper[4665]: I1205 01:32:24.799722 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f7b0e53d-dba2-440d-844e-dd4ca34f1895","Type":"ContainerStarted","Data":"c5f3896195bb34b2a0ba50833c289f7f2245667ea8c0b75853e9bbfdd54f163f"} Dec 05 01:32:24 crc kubenswrapper[4665]: I1205 01:32:24.802584 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b1a1511a-dace-4cdd-85f9-39a189eacf7c","Type":"ContainerStarted","Data":"6f7bf95f01d7fe181f2675415ede9c1dfd8b91f7861a5467a34bb26e87d4d8b3"} Dec 05 01:32:24 crc kubenswrapper[4665]: I1205 01:32:24.802717 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b1a1511a-dace-4cdd-85f9-39a189eacf7c" containerName="glance-log" containerID="cri-o://5c83a0ba54c84a14601b273a514ae7b3c9c644d5c392af546032fb2c3a6bd6b7" gracePeriod=30 Dec 05 01:32:24 crc kubenswrapper[4665]: I1205 01:32:24.802931 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="b1a1511a-dace-4cdd-85f9-39a189eacf7c" containerName="glance-httpd" containerID="cri-o://6f7bf95f01d7fe181f2675415ede9c1dfd8b91f7861a5467a34bb26e87d4d8b3" gracePeriod=30 Dec 05 01:32:24 crc kubenswrapper[4665]: I1205 01:32:24.814634 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b995216-a8c4-418c-9a82-eff79ca5360c","Type":"ContainerStarted","Data":"eb59fe1d1b872f3c8fc8b79d8d750264f43eac4650a8ce7a479a026dcaf2977e"} Dec 05 01:32:24 crc kubenswrapper[4665]: I1205 01:32:24.822942 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-644f785f4-mslbg" event={"ID":"64407a72-3fdf-450f-b5c0-913ee74bb437","Type":"ContainerStarted","Data":"0d22edabd62a01b81060d36b7b04935aa12a247013b11f7437c9d25fbca25bbb"} Dec 05 01:32:24 crc kubenswrapper[4665]: I1205 01:32:24.840386 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-444px" event={"ID":"b14532d1-bdd2-4576-8d6a-b94c7d3fc137","Type":"ContainerStarted","Data":"f513d9636eeced8ea3fe6a12e8c145314d7fb5360a09a4cb66f7bc867f2dcc80"} Dec 05 01:32:24 crc kubenswrapper[4665]: I1205 01:32:24.841115 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/dnsmasq-dns-55f844cf75-444px" Dec 05 01:32:24 crc kubenswrapper[4665]: I1205 01:32:24.844953 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-86cd4c9876-glfvx" event={"ID":"2178a916-adc5-4ff5-8972-30b105320f5f","Type":"ContainerStarted","Data":"5fee73ab6bf34e37ceb73d73cdc466ea8680df6b74400f1cd845b17c4378965f"} Dec 05 01:32:24 crc kubenswrapper[4665]: I1205 01:32:24.855953 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=27.855929606 podStartE2EDuration="27.855929606s" podCreationTimestamp="2025-12-05 01:31:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:32:24.819549733 +0000 UTC m=+1320.158942022" watchObservedRunningTime="2025-12-05 01:32:24.855929606 +0000 UTC m=+1320.195321905" Dec 05 01:32:24 crc kubenswrapper[4665]: I1205 01:32:24.884280 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=37.884261416 podStartE2EDuration="37.884261416s" podCreationTimestamp="2025-12-05 01:31:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:32:24.870935736 +0000 UTC m=+1320.210328035" watchObservedRunningTime="2025-12-05 01:32:24.884261416 +0000 UTC m=+1320.223653715" Dec 05 01:32:24 crc kubenswrapper[4665]: I1205 01:32:24.959934 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-644f785f4-mslbg" podStartSLOduration=32.438505603 podStartE2EDuration="32.959911972s" podCreationTimestamp="2025-12-05 01:31:52 +0000 UTC" firstStartedPulling="2025-12-05 01:32:21.181222719 +0000 UTC m=+1316.520615018" lastFinishedPulling="2025-12-05 01:32:21.702629088 +0000 UTC m=+1317.042021387" observedRunningTime="2025-12-05 01:32:24.941707405 +0000 UTC m=+1320.281099704" watchObservedRunningTime="2025-12-05 01:32:24.959911972 +0000 UTC m=+1320.299304271" Dec 05 01:32:24 crc kubenswrapper[4665]: I1205 01:32:24.972763 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-55f844cf75-444px" podStartSLOduration=5.97273454 podStartE2EDuration="5.97273454s" podCreationTimestamp="2025-12-05 01:32:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:32:24.966495701 +0000 UTC m=+1320.305888000" watchObservedRunningTime="2025-12-05 01:32:24.97273454 +0000 UTC m=+1320.312126839" Dec 05 01:32:25 crc kubenswrapper[4665]: I1205 01:32:25.028676 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-86cd4c9876-glfvx" podStartSLOduration=33.559335635 podStartE2EDuration="34.028657633s" podCreationTimestamp="2025-12-05 01:31:51 +0000 UTC" firstStartedPulling="2025-12-05 01:32:21.326490197 +0000 UTC m=+1316.665882496" lastFinishedPulling="2025-12-05 01:32:21.795812195 +0000 UTC m=+1317.135204494" observedRunningTime="2025-12-05 01:32:24.995781734 +0000 UTC m=+1320.335174033" watchObservedRunningTime="2025-12-05 01:32:25.028657633 +0000 UTC m=+1320.368049932" Dec 05 01:32:25 crc kubenswrapper[4665]: I1205 01:32:25.855069 4665 generic.go:334] "Generic (PLEG): container finished" podID="b1a1511a-dace-4cdd-85f9-39a189eacf7c" 
containerID="6f7bf95f01d7fe181f2675415ede9c1dfd8b91f7861a5467a34bb26e87d4d8b3" exitCode=143 Dec 05 01:32:25 crc kubenswrapper[4665]: I1205 01:32:25.855434 4665 generic.go:334] "Generic (PLEG): container finished" podID="b1a1511a-dace-4cdd-85f9-39a189eacf7c" containerID="5c83a0ba54c84a14601b273a514ae7b3c9c644d5c392af546032fb2c3a6bd6b7" exitCode=143 Dec 05 01:32:25 crc kubenswrapper[4665]: I1205 01:32:25.855160 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b1a1511a-dace-4cdd-85f9-39a189eacf7c","Type":"ContainerDied","Data":"6f7bf95f01d7fe181f2675415ede9c1dfd8b91f7861a5467a34bb26e87d4d8b3"} Dec 05 01:32:25 crc kubenswrapper[4665]: I1205 01:32:25.855574 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b1a1511a-dace-4cdd-85f9-39a189eacf7c","Type":"ContainerDied","Data":"5c83a0ba54c84a14601b273a514ae7b3c9c644d5c392af546032fb2c3a6bd6b7"} Dec 05 01:32:27 crc kubenswrapper[4665]: I1205 01:32:27.872084 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5b484f7985-8qkjq" event={"ID":"c6a6180f-d384-4015-8bf3-6563123c2f6a","Type":"ContainerStarted","Data":"016feb77ccbd3faa54fa255ca187a0a26f6f6077621ff31d0ff878e868d23957"} Dec 05 01:32:28 crc kubenswrapper[4665]: I1205 01:32:28.054656 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 01:32:28 crc kubenswrapper[4665]: I1205 01:32:28.054973 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 01:32:28 crc kubenswrapper[4665]: I1205 01:32:28.054991 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 01:32:28 crc kubenswrapper[4665]: I1205 01:32:28.055010 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 01:32:28 crc kubenswrapper[4665]: I1205 01:32:28.083795 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 01:32:28 crc kubenswrapper[4665]: I1205 01:32:28.091613 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 01:32:28 crc kubenswrapper[4665]: I1205 01:32:28.891519 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"b1a1511a-dace-4cdd-85f9-39a189eacf7c","Type":"ContainerDied","Data":"260feaf01578bf825dc2dae8b3b9fa886fc3e2193ed803e1bad1a9847fc62378"} Dec 05 01:32:28 crc kubenswrapper[4665]: I1205 01:32:28.891807 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="260feaf01578bf825dc2dae8b3b9fa886fc3e2193ed803e1bad1a9847fc62378" Dec 05 01:32:28 crc kubenswrapper[4665]: I1205 01:32:28.901656 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 01:32:28 crc kubenswrapper[4665]: I1205 01:32:28.980065 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1a1511a-dace-4cdd-85f9-39a189eacf7c-combined-ca-bundle\") pod \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " Dec 05 01:32:28 crc kubenswrapper[4665]: I1205 01:32:28.980146 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1a1511a-dace-4cdd-85f9-39a189eacf7c-scripts\") pod \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " Dec 05 01:32:28 crc kubenswrapper[4665]: I1205 01:32:28.980175 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b1a1511a-dace-4cdd-85f9-39a189eacf7c-httpd-run\") pod \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " Dec 05 01:32:28 crc kubenswrapper[4665]: I1205 01:32:28.980236 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v9hqm\" (UniqueName: \"kubernetes.io/projected/b1a1511a-dace-4cdd-85f9-39a189eacf7c-kube-api-access-v9hqm\") pod \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " Dec 05 01:32:28 crc kubenswrapper[4665]: I1205 01:32:28.980322 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1a1511a-dace-4cdd-85f9-39a189eacf7c-config-data\") pod \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " Dec 05 01:32:28 crc kubenswrapper[4665]: I1205 01:32:28.980473 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1a1511a-dace-4cdd-85f9-39a189eacf7c-logs\") pod \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " Dec 05 01:32:28 crc kubenswrapper[4665]: I1205 01:32:28.980509 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\" (UID: \"b1a1511a-dace-4cdd-85f9-39a189eacf7c\") " Dec 05 01:32:28 crc kubenswrapper[4665]: I1205 01:32:28.982392 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1a1511a-dace-4cdd-85f9-39a189eacf7c-logs" (OuterVolumeSpecName: "logs") pod "b1a1511a-dace-4cdd-85f9-39a189eacf7c" (UID: "b1a1511a-dace-4cdd-85f9-39a189eacf7c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:32:28 crc kubenswrapper[4665]: I1205 01:32:28.982819 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b1a1511a-dace-4cdd-85f9-39a189eacf7c-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "b1a1511a-dace-4cdd-85f9-39a189eacf7c" (UID: "b1a1511a-dace-4cdd-85f9-39a189eacf7c"). InnerVolumeSpecName "httpd-run". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:32:29 crc kubenswrapper[4665]: I1205 01:32:29.004899 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1a1511a-dace-4cdd-85f9-39a189eacf7c-kube-api-access-v9hqm" (OuterVolumeSpecName: "kube-api-access-v9hqm") pod "b1a1511a-dace-4cdd-85f9-39a189eacf7c" (UID: "b1a1511a-dace-4cdd-85f9-39a189eacf7c"). InnerVolumeSpecName "kube-api-access-v9hqm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:32:29 crc kubenswrapper[4665]: I1205 01:32:29.006448 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1a1511a-dace-4cdd-85f9-39a189eacf7c-scripts" (OuterVolumeSpecName: "scripts") pod "b1a1511a-dace-4cdd-85f9-39a189eacf7c" (UID: "b1a1511a-dace-4cdd-85f9-39a189eacf7c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:32:29 crc kubenswrapper[4665]: I1205 01:32:29.010800 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "b1a1511a-dace-4cdd-85f9-39a189eacf7c" (UID: "b1a1511a-dace-4cdd-85f9-39a189eacf7c"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 01:32:29 crc kubenswrapper[4665]: I1205 01:32:29.034605 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1a1511a-dace-4cdd-85f9-39a189eacf7c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b1a1511a-dace-4cdd-85f9-39a189eacf7c" (UID: "b1a1511a-dace-4cdd-85f9-39a189eacf7c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:32:29 crc kubenswrapper[4665]: I1205 01:32:29.083397 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1a1511a-dace-4cdd-85f9-39a189eacf7c-config-data" (OuterVolumeSpecName: "config-data") pod "b1a1511a-dace-4cdd-85f9-39a189eacf7c" (UID: "b1a1511a-dace-4cdd-85f9-39a189eacf7c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:32:29 crc kubenswrapper[4665]: I1205 01:32:29.085983 4665 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b1a1511a-dace-4cdd-85f9-39a189eacf7c-logs\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:29 crc kubenswrapper[4665]: I1205 01:32:29.086033 4665 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" " Dec 05 01:32:29 crc kubenswrapper[4665]: I1205 01:32:29.086045 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b1a1511a-dace-4cdd-85f9-39a189eacf7c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:29 crc kubenswrapper[4665]: I1205 01:32:29.086056 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b1a1511a-dace-4cdd-85f9-39a189eacf7c-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:29 crc kubenswrapper[4665]: I1205 01:32:29.086066 4665 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/b1a1511a-dace-4cdd-85f9-39a189eacf7c-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:29 crc kubenswrapper[4665]: I1205 01:32:29.086074 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v9hqm\" (UniqueName: \"kubernetes.io/projected/b1a1511a-dace-4cdd-85f9-39a189eacf7c-kube-api-access-v9hqm\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:29 crc kubenswrapper[4665]: I1205 01:32:29.086084 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b1a1511a-dace-4cdd-85f9-39a189eacf7c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:29 crc kubenswrapper[4665]: I1205 01:32:29.110254 4665 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc" Dec 05 01:32:29 crc kubenswrapper[4665]: I1205 01:32:29.188049 4665 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:29 crc kubenswrapper[4665]: I1205 01:32:29.910562 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-5b484f7985-8qkjq" event={"ID":"c6a6180f-d384-4015-8bf3-6563123c2f6a","Type":"ContainerStarted","Data":"9b34511fc2de87d862b6db007188df973bd13fb3fe295276000594a6866c4b69"} Dec 05 01:32:29 crc kubenswrapper[4665]: I1205 01:32:29.910643 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 01:32:29 crc kubenswrapper[4665]: I1205 01:32:29.917129 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-5b484f7985-8qkjq" Dec 05 01:32:29 crc kubenswrapper[4665]: I1205 01:32:29.949752 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-5b484f7985-8qkjq" podStartSLOduration=6.949729756 podStartE2EDuration="6.949729756s" podCreationTimestamp="2025-12-05 01:32:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:32:29.935799831 +0000 UTC m=+1325.275192130" watchObservedRunningTime="2025-12-05 01:32:29.949729756 +0000 UTC m=+1325.289122055" Dec 05 01:32:29 crc kubenswrapper[4665]: I1205 01:32:29.969324 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 01:32:29 crc kubenswrapper[4665]: I1205 01:32:29.983892 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.007131 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 01:32:30 crc kubenswrapper[4665]: E1205 01:32:30.007664 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1a1511a-dace-4cdd-85f9-39a189eacf7c" containerName="glance-log" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.007681 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1a1511a-dace-4cdd-85f9-39a189eacf7c" containerName="glance-log" Dec 05 01:32:30 crc kubenswrapper[4665]: E1205 01:32:30.007704 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1a1511a-dace-4cdd-85f9-39a189eacf7c" containerName="glance-httpd" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.007711 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1a1511a-dace-4cdd-85f9-39a189eacf7c" containerName="glance-httpd" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.007893 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1a1511a-dace-4cdd-85f9-39a189eacf7c" containerName="glance-log" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.007909 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1a1511a-dace-4cdd-85f9-39a189eacf7c" containerName="glance-httpd" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.008779 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.041660 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.041926 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.077363 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.127397 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aba4deca-cc3e-4589-9f3a-f7149cf107c7-config-data\") pod \"glance-default-external-api-0\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") " pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.127453 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aba4deca-cc3e-4589-9f3a-f7149cf107c7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") " pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.127492 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") " pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.127513 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/aba4deca-cc3e-4589-9f3a-f7149cf107c7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") " pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.127548 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aba4deca-cc3e-4589-9f3a-f7149cf107c7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") " pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.127577 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aba4deca-cc3e-4589-9f3a-f7149cf107c7-scripts\") pod \"glance-default-external-api-0\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") " pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.127637 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vjxwc\" (UniqueName: \"kubernetes.io/projected/aba4deca-cc3e-4589-9f3a-f7149cf107c7-kube-api-access-vjxwc\") pod \"glance-default-external-api-0\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") " pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.127652 4665 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aba4deca-cc3e-4589-9f3a-f7149cf107c7-logs\") pod \"glance-default-external-api-0\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") " pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.229446 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vjxwc\" (UniqueName: \"kubernetes.io/projected/aba4deca-cc3e-4589-9f3a-f7149cf107c7-kube-api-access-vjxwc\") pod \"glance-default-external-api-0\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") " pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.229485 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aba4deca-cc3e-4589-9f3a-f7149cf107c7-logs\") pod \"glance-default-external-api-0\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") " pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.229569 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aba4deca-cc3e-4589-9f3a-f7149cf107c7-config-data\") pod \"glance-default-external-api-0\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") " pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.229586 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aba4deca-cc3e-4589-9f3a-f7149cf107c7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") " pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.229607 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") " pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.229626 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/aba4deca-cc3e-4589-9f3a-f7149cf107c7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") " pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.229651 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aba4deca-cc3e-4589-9f3a-f7149cf107c7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") " pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.229672 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aba4deca-cc3e-4589-9f3a-f7149cf107c7-scripts\") pod \"glance-default-external-api-0\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") " pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.231803 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/aba4deca-cc3e-4589-9f3a-f7149cf107c7-logs\") pod \"glance-default-external-api-0\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") " pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.232194 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/aba4deca-cc3e-4589-9f3a-f7149cf107c7-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") " pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.232419 4665 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.237660 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aba4deca-cc3e-4589-9f3a-f7149cf107c7-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") " pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.243547 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aba4deca-cc3e-4589-9f3a-f7149cf107c7-config-data\") pod \"glance-default-external-api-0\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") " pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.243962 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aba4deca-cc3e-4589-9f3a-f7149cf107c7-scripts\") pod \"glance-default-external-api-0\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") " pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.256670 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aba4deca-cc3e-4589-9f3a-f7149cf107c7-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") " pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.264733 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vjxwc\" (UniqueName: \"kubernetes.io/projected/aba4deca-cc3e-4589-9f3a-f7149cf107c7-kube-api-access-vjxwc\") pod \"glance-default-external-api-0\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") " pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.290878 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") " pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.371896 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.389473 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-55f844cf75-444px" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.527650 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-d8qlp"] Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.527867 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" podUID="bc826875-4d2d-4dd2-a827-852a00d33450" containerName="dnsmasq-dns" containerID="cri-o://975581cab7f88367f9be540441a904f7f91637db276177605072f908ceaa3a20" gracePeriod=10 Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.909436 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b1a1511a-dace-4cdd-85f9-39a189eacf7c" path="/var/lib/kubelet/pods/b1a1511a-dace-4cdd-85f9-39a189eacf7c/volumes" Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.930194 4665 generic.go:334] "Generic (PLEG): container finished" podID="de47bb8c-ea83-4f9a-be28-5716b59d25ed" containerID="082c94ad7f4d5cff3d790e9fecb6cc2e99265db8f114c713f8ebc5d992827ee8" exitCode=0 Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.930253 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-zhgm4" event={"ID":"de47bb8c-ea83-4f9a-be28-5716b59d25ed","Type":"ContainerDied","Data":"082c94ad7f4d5cff3d790e9fecb6cc2e99265db8f114c713f8ebc5d992827ee8"} Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.932784 4665 generic.go:334] "Generic (PLEG): container finished" podID="bc826875-4d2d-4dd2-a827-852a00d33450" containerID="975581cab7f88367f9be540441a904f7f91637db276177605072f908ceaa3a20" exitCode=0 Dec 05 01:32:30 crc kubenswrapper[4665]: I1205 01:32:30.932856 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" event={"ID":"bc826875-4d2d-4dd2-a827-852a00d33450","Type":"ContainerDied","Data":"975581cab7f88367f9be540441a904f7f91637db276177605072f908ceaa3a20"} Dec 05 01:32:31 crc kubenswrapper[4665]: I1205 01:32:31.205237 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" Dec 05 01:32:31 crc kubenswrapper[4665]: I1205 01:32:31.251999 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 01:32:31 crc kubenswrapper[4665]: I1205 01:32:31.266789 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-config\") pod \"bc826875-4d2d-4dd2-a827-852a00d33450\" (UID: \"bc826875-4d2d-4dd2-a827-852a00d33450\") " Dec 05 01:32:31 crc kubenswrapper[4665]: I1205 01:32:31.266921 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n4rb2\" (UniqueName: \"kubernetes.io/projected/bc826875-4d2d-4dd2-a827-852a00d33450-kube-api-access-n4rb2\") pod \"bc826875-4d2d-4dd2-a827-852a00d33450\" (UID: \"bc826875-4d2d-4dd2-a827-852a00d33450\") " Dec 05 01:32:31 crc kubenswrapper[4665]: I1205 01:32:31.266988 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-dns-swift-storage-0\") pod \"bc826875-4d2d-4dd2-a827-852a00d33450\" (UID: \"bc826875-4d2d-4dd2-a827-852a00d33450\") " Dec 05 01:32:31 crc kubenswrapper[4665]: I1205 01:32:31.267013 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-ovsdbserver-sb\") pod \"bc826875-4d2d-4dd2-a827-852a00d33450\" (UID: \"bc826875-4d2d-4dd2-a827-852a00d33450\") " Dec 05 01:32:31 crc kubenswrapper[4665]: I1205 01:32:31.267039 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-ovsdbserver-nb\") pod \"bc826875-4d2d-4dd2-a827-852a00d33450\" (UID: \"bc826875-4d2d-4dd2-a827-852a00d33450\") " Dec 05 01:32:31 crc kubenswrapper[4665]: I1205 01:32:31.267137 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-dns-svc\") pod \"bc826875-4d2d-4dd2-a827-852a00d33450\" (UID: \"bc826875-4d2d-4dd2-a827-852a00d33450\") " Dec 05 01:32:31 crc kubenswrapper[4665]: I1205 01:32:31.273499 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc826875-4d2d-4dd2-a827-852a00d33450-kube-api-access-n4rb2" (OuterVolumeSpecName: "kube-api-access-n4rb2") pod "bc826875-4d2d-4dd2-a827-852a00d33450" (UID: "bc826875-4d2d-4dd2-a827-852a00d33450"). InnerVolumeSpecName "kube-api-access-n4rb2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:32:31 crc kubenswrapper[4665]: I1205 01:32:31.370123 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n4rb2\" (UniqueName: \"kubernetes.io/projected/bc826875-4d2d-4dd2-a827-852a00d33450-kube-api-access-n4rb2\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:31 crc kubenswrapper[4665]: I1205 01:32:31.430928 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "bc826875-4d2d-4dd2-a827-852a00d33450" (UID: "bc826875-4d2d-4dd2-a827-852a00d33450"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:32:31 crc kubenswrapper[4665]: I1205 01:32:31.471831 4665 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:31 crc kubenswrapper[4665]: I1205 01:32:31.482400 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-config" (OuterVolumeSpecName: "config") pod "bc826875-4d2d-4dd2-a827-852a00d33450" (UID: "bc826875-4d2d-4dd2-a827-852a00d33450"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:32:31 crc kubenswrapper[4665]: I1205 01:32:31.575148 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:31 crc kubenswrapper[4665]: I1205 01:32:31.603650 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "bc826875-4d2d-4dd2-a827-852a00d33450" (UID: "bc826875-4d2d-4dd2-a827-852a00d33450"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:32:31 crc kubenswrapper[4665]: I1205 01:32:31.613863 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "bc826875-4d2d-4dd2-a827-852a00d33450" (UID: "bc826875-4d2d-4dd2-a827-852a00d33450"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:32:31 crc kubenswrapper[4665]: I1205 01:32:31.614950 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "bc826875-4d2d-4dd2-a827-852a00d33450" (UID: "bc826875-4d2d-4dd2-a827-852a00d33450"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:32:31 crc kubenswrapper[4665]: I1205 01:32:31.685518 4665 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:31 crc kubenswrapper[4665]: I1205 01:32:31.685547 4665 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:31 crc kubenswrapper[4665]: I1205 01:32:31.685557 4665 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/bc826875-4d2d-4dd2-a827-852a00d33450-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:31 crc kubenswrapper[4665]: I1205 01:32:31.978061 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"aba4deca-cc3e-4589-9f3a-f7149cf107c7","Type":"ContainerStarted","Data":"d4d3541b87a2f83eaa3a665edaaa839cb0d187ceb12f564ede805c0b2310ece6"} Dec 05 01:32:31 crc kubenswrapper[4665]: I1205 01:32:31.983357 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" event={"ID":"bc826875-4d2d-4dd2-a827-852a00d33450","Type":"ContainerDied","Data":"f2c6a4bc8e2baedf98c5ef88496b9d8c0905093f7821110b993a6d4d1dc4ab93"} Dec 05 01:32:31 crc kubenswrapper[4665]: I1205 01:32:31.983405 4665 scope.go:117] "RemoveContainer" containerID="975581cab7f88367f9be540441a904f7f91637db276177605072f908ceaa3a20" Dec 05 01:32:31 crc kubenswrapper[4665]: I1205 01:32:31.983512 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-785d8bcb8c-d8qlp" Dec 05 01:32:32 crc kubenswrapper[4665]: I1205 01:32:32.032938 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-d8qlp"] Dec 05 01:32:32 crc kubenswrapper[4665]: I1205 01:32:32.058199 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-785d8bcb8c-d8qlp"] Dec 05 01:32:32 crc kubenswrapper[4665]: I1205 01:32:32.078465 4665 scope.go:117] "RemoveContainer" containerID="c8b95daad92f45b4657d8bfcc7eaf749d8c87dda64698db67ae93e86362cdbbc" Dec 05 01:32:32 crc kubenswrapper[4665]: I1205 01:32:32.343575 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:32:32 crc kubenswrapper[4665]: I1205 01:32:32.344411 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:32:32 crc kubenswrapper[4665]: I1205 01:32:32.398692 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-zhgm4" Dec 05 01:32:32 crc kubenswrapper[4665]: I1205 01:32:32.484791 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:32:32 crc kubenswrapper[4665]: I1205 01:32:32.485520 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:32:32 crc kubenswrapper[4665]: I1205 01:32:32.498077 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de47bb8c-ea83-4f9a-be28-5716b59d25ed-config-data\") pod \"de47bb8c-ea83-4f9a-be28-5716b59d25ed\" (UID: \"de47bb8c-ea83-4f9a-be28-5716b59d25ed\") " Dec 05 01:32:32 crc kubenswrapper[4665]: I1205 01:32:32.498126 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de47bb8c-ea83-4f9a-be28-5716b59d25ed-logs\") pod \"de47bb8c-ea83-4f9a-be28-5716b59d25ed\" (UID: \"de47bb8c-ea83-4f9a-be28-5716b59d25ed\") " Dec 05 01:32:32 crc kubenswrapper[4665]: I1205 01:32:32.498167 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de47bb8c-ea83-4f9a-be28-5716b59d25ed-combined-ca-bundle\") pod \"de47bb8c-ea83-4f9a-be28-5716b59d25ed\" (UID: \"de47bb8c-ea83-4f9a-be28-5716b59d25ed\") " Dec 05 01:32:32 crc kubenswrapper[4665]: I1205 01:32:32.498265 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de47bb8c-ea83-4f9a-be28-5716b59d25ed-scripts\") pod \"de47bb8c-ea83-4f9a-be28-5716b59d25ed\" (UID: \"de47bb8c-ea83-4f9a-be28-5716b59d25ed\") " Dec 05 01:32:32 crc kubenswrapper[4665]: I1205 01:32:32.498324 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bghqz\" (UniqueName: \"kubernetes.io/projected/de47bb8c-ea83-4f9a-be28-5716b59d25ed-kube-api-access-bghqz\") pod \"de47bb8c-ea83-4f9a-be28-5716b59d25ed\" (UID: \"de47bb8c-ea83-4f9a-be28-5716b59d25ed\") " Dec 05 01:32:32 crc kubenswrapper[4665]: I1205 01:32:32.499608 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de47bb8c-ea83-4f9a-be28-5716b59d25ed-logs" (OuterVolumeSpecName: "logs") pod "de47bb8c-ea83-4f9a-be28-5716b59d25ed" (UID: "de47bb8c-ea83-4f9a-be28-5716b59d25ed"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:32:32 crc kubenswrapper[4665]: I1205 01:32:32.504790 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de47bb8c-ea83-4f9a-be28-5716b59d25ed-kube-api-access-bghqz" (OuterVolumeSpecName: "kube-api-access-bghqz") pod "de47bb8c-ea83-4f9a-be28-5716b59d25ed" (UID: "de47bb8c-ea83-4f9a-be28-5716b59d25ed"). InnerVolumeSpecName "kube-api-access-bghqz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:32:32 crc kubenswrapper[4665]: I1205 01:32:32.513906 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de47bb8c-ea83-4f9a-be28-5716b59d25ed-scripts" (OuterVolumeSpecName: "scripts") pod "de47bb8c-ea83-4f9a-be28-5716b59d25ed" (UID: "de47bb8c-ea83-4f9a-be28-5716b59d25ed"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:32:32 crc kubenswrapper[4665]: I1205 01:32:32.580530 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de47bb8c-ea83-4f9a-be28-5716b59d25ed-config-data" (OuterVolumeSpecName: "config-data") pod "de47bb8c-ea83-4f9a-be28-5716b59d25ed" (UID: "de47bb8c-ea83-4f9a-be28-5716b59d25ed"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:32:32 crc kubenswrapper[4665]: I1205 01:32:32.601808 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/de47bb8c-ea83-4f9a-be28-5716b59d25ed-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:32 crc kubenswrapper[4665]: I1205 01:32:32.601835 4665 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/de47bb8c-ea83-4f9a-be28-5716b59d25ed-logs\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:32 crc kubenswrapper[4665]: I1205 01:32:32.601846 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/de47bb8c-ea83-4f9a-be28-5716b59d25ed-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:32 crc kubenswrapper[4665]: I1205 01:32:32.601854 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bghqz\" (UniqueName: \"kubernetes.io/projected/de47bb8c-ea83-4f9a-be28-5716b59d25ed-kube-api-access-bghqz\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:32 crc kubenswrapper[4665]: I1205 01:32:32.606473 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/de47bb8c-ea83-4f9a-be28-5716b59d25ed-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "de47bb8c-ea83-4f9a-be28-5716b59d25ed" (UID: "de47bb8c-ea83-4f9a-be28-5716b59d25ed"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:32:32 crc kubenswrapper[4665]: I1205 01:32:32.703145 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/de47bb8c-ea83-4f9a-be28-5716b59d25ed-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:32 crc kubenswrapper[4665]: I1205 01:32:32.917507 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc826875-4d2d-4dd2-a827-852a00d33450" path="/var/lib/kubelet/pods/bc826875-4d2d-4dd2-a827-852a00d33450/volumes" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.072659 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-zhgm4" event={"ID":"de47bb8c-ea83-4f9a-be28-5716b59d25ed","Type":"ContainerDied","Data":"66695e214d82faf35292bd21e901d0eccb28a52dc2a415910624784178b1a0c9"} Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.072718 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="66695e214d82faf35292bd21e901d0eccb28a52dc2a415910624784178b1a0c9" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.072817 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-zhgm4" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.082968 4665 generic.go:334] "Generic (PLEG): container finished" podID="329fcb4a-b83f-4831-989a-584868907b9c" containerID="e50e77e5d3507d5185c3652341c8aa63f89eb40a660322833cb28f5ee4b2993e" exitCode=0 Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.083058 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-xkf6f" event={"ID":"329fcb4a-b83f-4831-989a-584868907b9c","Type":"ContainerDied","Data":"e50e77e5d3507d5185c3652341c8aa63f89eb40a660322833cb28f5ee4b2993e"} Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.092251 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"aba4deca-cc3e-4589-9f3a-f7149cf107c7","Type":"ContainerStarted","Data":"7952c4e41eb83c5e3a3313ab9b0feb795cd9423b3d6400142bfe223c9cf8ec8e"} Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.104364 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-7c79d9c44-5ps46"] Dec 05 01:32:33 crc kubenswrapper[4665]: E1205 01:32:33.104802 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc826875-4d2d-4dd2-a827-852a00d33450" containerName="dnsmasq-dns" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.104814 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc826875-4d2d-4dd2-a827-852a00d33450" containerName="dnsmasq-dns" Dec 05 01:32:33 crc kubenswrapper[4665]: E1205 01:32:33.104830 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de47bb8c-ea83-4f9a-be28-5716b59d25ed" containerName="placement-db-sync" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.104836 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="de47bb8c-ea83-4f9a-be28-5716b59d25ed" containerName="placement-db-sync" Dec 05 01:32:33 crc kubenswrapper[4665]: E1205 01:32:33.104849 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bc826875-4d2d-4dd2-a827-852a00d33450" containerName="init" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.104855 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="bc826875-4d2d-4dd2-a827-852a00d33450" containerName="init" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.105035 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="bc826875-4d2d-4dd2-a827-852a00d33450" containerName="dnsmasq-dns" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.105062 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="de47bb8c-ea83-4f9a-be28-5716b59d25ed" containerName="placement-db-sync" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.106445 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.109724 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-v4657" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.110980 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.111125 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.112788 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.127203 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7c79d9c44-5ps46"] Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.133431 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.215453 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52283875-2314-426a-b5ff-77a8b000f4cc-scripts\") pod \"placement-7c79d9c44-5ps46\" (UID: \"52283875-2314-426a-b5ff-77a8b000f4cc\") " pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.215495 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52283875-2314-426a-b5ff-77a8b000f4cc-combined-ca-bundle\") pod \"placement-7c79d9c44-5ps46\" (UID: \"52283875-2314-426a-b5ff-77a8b000f4cc\") " pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.215523 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/52283875-2314-426a-b5ff-77a8b000f4cc-public-tls-certs\") pod \"placement-7c79d9c44-5ps46\" (UID: \"52283875-2314-426a-b5ff-77a8b000f4cc\") " pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.215591 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52283875-2314-426a-b5ff-77a8b000f4cc-config-data\") pod \"placement-7c79d9c44-5ps46\" (UID: \"52283875-2314-426a-b5ff-77a8b000f4cc\") " pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.215607 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52283875-2314-426a-b5ff-77a8b000f4cc-logs\") pod \"placement-7c79d9c44-5ps46\" (UID: \"52283875-2314-426a-b5ff-77a8b000f4cc\") " pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.215726 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-64djl\" (UniqueName: \"kubernetes.io/projected/52283875-2314-426a-b5ff-77a8b000f4cc-kube-api-access-64djl\") pod \"placement-7c79d9c44-5ps46\" (UID: \"52283875-2314-426a-b5ff-77a8b000f4cc\") " pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.215763 4665 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/52283875-2314-426a-b5ff-77a8b000f4cc-internal-tls-certs\") pod \"placement-7c79d9c44-5ps46\" (UID: \"52283875-2314-426a-b5ff-77a8b000f4cc\") " pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.317433 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-64djl\" (UniqueName: \"kubernetes.io/projected/52283875-2314-426a-b5ff-77a8b000f4cc-kube-api-access-64djl\") pod \"placement-7c79d9c44-5ps46\" (UID: \"52283875-2314-426a-b5ff-77a8b000f4cc\") " pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.317499 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/52283875-2314-426a-b5ff-77a8b000f4cc-internal-tls-certs\") pod \"placement-7c79d9c44-5ps46\" (UID: \"52283875-2314-426a-b5ff-77a8b000f4cc\") " pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.317546 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52283875-2314-426a-b5ff-77a8b000f4cc-combined-ca-bundle\") pod \"placement-7c79d9c44-5ps46\" (UID: \"52283875-2314-426a-b5ff-77a8b000f4cc\") " pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.317563 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52283875-2314-426a-b5ff-77a8b000f4cc-scripts\") pod \"placement-7c79d9c44-5ps46\" (UID: \"52283875-2314-426a-b5ff-77a8b000f4cc\") " pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.317586 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/52283875-2314-426a-b5ff-77a8b000f4cc-public-tls-certs\") pod \"placement-7c79d9c44-5ps46\" (UID: \"52283875-2314-426a-b5ff-77a8b000f4cc\") " pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.317619 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52283875-2314-426a-b5ff-77a8b000f4cc-config-data\") pod \"placement-7c79d9c44-5ps46\" (UID: \"52283875-2314-426a-b5ff-77a8b000f4cc\") " pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.317636 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52283875-2314-426a-b5ff-77a8b000f4cc-logs\") pod \"placement-7c79d9c44-5ps46\" (UID: \"52283875-2314-426a-b5ff-77a8b000f4cc\") " pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.319133 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/52283875-2314-426a-b5ff-77a8b000f4cc-logs\") pod \"placement-7c79d9c44-5ps46\" (UID: \"52283875-2314-426a-b5ff-77a8b000f4cc\") " pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.325239 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: 
\"kubernetes.io/secret/52283875-2314-426a-b5ff-77a8b000f4cc-internal-tls-certs\") pod \"placement-7c79d9c44-5ps46\" (UID: \"52283875-2314-426a-b5ff-77a8b000f4cc\") " pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.325825 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/52283875-2314-426a-b5ff-77a8b000f4cc-public-tls-certs\") pod \"placement-7c79d9c44-5ps46\" (UID: \"52283875-2314-426a-b5ff-77a8b000f4cc\") " pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.329600 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/52283875-2314-426a-b5ff-77a8b000f4cc-scripts\") pod \"placement-7c79d9c44-5ps46\" (UID: \"52283875-2314-426a-b5ff-77a8b000f4cc\") " pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.336940 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/52283875-2314-426a-b5ff-77a8b000f4cc-combined-ca-bundle\") pod \"placement-7c79d9c44-5ps46\" (UID: \"52283875-2314-426a-b5ff-77a8b000f4cc\") " pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.338382 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/52283875-2314-426a-b5ff-77a8b000f4cc-config-data\") pod \"placement-7c79d9c44-5ps46\" (UID: \"52283875-2314-426a-b5ff-77a8b000f4cc\") " pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.354348 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-64djl\" (UniqueName: \"kubernetes.io/projected/52283875-2314-426a-b5ff-77a8b000f4cc-kube-api-access-64djl\") pod \"placement-7c79d9c44-5ps46\" (UID: \"52283875-2314-426a-b5ff-77a8b000f4cc\") " pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:32:33 crc kubenswrapper[4665]: I1205 01:32:33.449626 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:32:34 crc kubenswrapper[4665]: I1205 01:32:34.102319 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"aba4deca-cc3e-4589-9f3a-f7149cf107c7","Type":"ContainerStarted","Data":"1bb7c51a311878a6a67ddc59dfde14100efee59a574cc3e1f77a41b71041ba2e"} Dec 05 01:32:34 crc kubenswrapper[4665]: I1205 01:32:34.134009 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.133994597 podStartE2EDuration="5.133994597s" podCreationTimestamp="2025-12-05 01:32:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:32:34.13034575 +0000 UTC m=+1329.469738049" watchObservedRunningTime="2025-12-05 01:32:34.133994597 +0000 UTC m=+1329.473386896" Dec 05 01:32:34 crc kubenswrapper[4665]: I1205 01:32:34.977476 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 01:32:35 crc kubenswrapper[4665]: I1205 01:32:35.047862 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 01:32:40 crc kubenswrapper[4665]: I1205 01:32:40.373322 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 01:32:40 crc kubenswrapper[4665]: I1205 01:32:40.373877 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 01:32:40 crc kubenswrapper[4665]: I1205 01:32:40.414060 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 01:32:40 crc kubenswrapper[4665]: I1205 01:32:40.416482 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 01:32:41 crc kubenswrapper[4665]: I1205 01:32:41.103060 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-xkf6f" Dec 05 01:32:41 crc kubenswrapper[4665]: I1205 01:32:41.251727 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-config-data\") pod \"329fcb4a-b83f-4831-989a-584868907b9c\" (UID: \"329fcb4a-b83f-4831-989a-584868907b9c\") " Dec 05 01:32:41 crc kubenswrapper[4665]: I1205 01:32:41.251775 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-scripts\") pod \"329fcb4a-b83f-4831-989a-584868907b9c\" (UID: \"329fcb4a-b83f-4831-989a-584868907b9c\") " Dec 05 01:32:41 crc kubenswrapper[4665]: I1205 01:32:41.251881 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-credential-keys\") pod \"329fcb4a-b83f-4831-989a-584868907b9c\" (UID: \"329fcb4a-b83f-4831-989a-584868907b9c\") " Dec 05 01:32:41 crc kubenswrapper[4665]: I1205 01:32:41.251942 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-fernet-keys\") pod \"329fcb4a-b83f-4831-989a-584868907b9c\" (UID: \"329fcb4a-b83f-4831-989a-584868907b9c\") " Dec 05 01:32:41 crc kubenswrapper[4665]: I1205 01:32:41.251970 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cgbbd\" (UniqueName: \"kubernetes.io/projected/329fcb4a-b83f-4831-989a-584868907b9c-kube-api-access-cgbbd\") pod \"329fcb4a-b83f-4831-989a-584868907b9c\" (UID: \"329fcb4a-b83f-4831-989a-584868907b9c\") " Dec 05 01:32:41 crc kubenswrapper[4665]: I1205 01:32:41.252042 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-combined-ca-bundle\") pod \"329fcb4a-b83f-4831-989a-584868907b9c\" (UID: \"329fcb4a-b83f-4831-989a-584868907b9c\") " Dec 05 01:32:41 crc kubenswrapper[4665]: I1205 01:32:41.270604 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/329fcb4a-b83f-4831-989a-584868907b9c-kube-api-access-cgbbd" (OuterVolumeSpecName: "kube-api-access-cgbbd") pod "329fcb4a-b83f-4831-989a-584868907b9c" (UID: "329fcb4a-b83f-4831-989a-584868907b9c"). InnerVolumeSpecName "kube-api-access-cgbbd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:32:41 crc kubenswrapper[4665]: I1205 01:32:41.281903 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "329fcb4a-b83f-4831-989a-584868907b9c" (UID: "329fcb4a-b83f-4831-989a-584868907b9c"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:32:41 crc kubenswrapper[4665]: I1205 01:32:41.281929 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-scripts" (OuterVolumeSpecName: "scripts") pod "329fcb4a-b83f-4831-989a-584868907b9c" (UID: "329fcb4a-b83f-4831-989a-584868907b9c"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:32:41 crc kubenswrapper[4665]: I1205 01:32:41.303509 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-xkf6f" event={"ID":"329fcb4a-b83f-4831-989a-584868907b9c","Type":"ContainerDied","Data":"eb58908f61ff797cdca513d613e1316466699d00b9e90ae43257d9793681a576"} Dec 05 01:32:41 crc kubenswrapper[4665]: I1205 01:32:41.303716 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb58908f61ff797cdca513d613e1316466699d00b9e90ae43257d9793681a576" Dec 05 01:32:41 crc kubenswrapper[4665]: I1205 01:32:41.303736 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 01:32:41 crc kubenswrapper[4665]: I1205 01:32:41.303798 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-xkf6f" Dec 05 01:32:41 crc kubenswrapper[4665]: I1205 01:32:41.304646 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 01:32:41 crc kubenswrapper[4665]: I1205 01:32:41.308995 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "329fcb4a-b83f-4831-989a-584868907b9c" (UID: "329fcb4a-b83f-4831-989a-584868907b9c"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:32:41 crc kubenswrapper[4665]: I1205 01:32:41.332735 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "329fcb4a-b83f-4831-989a-584868907b9c" (UID: "329fcb4a-b83f-4831-989a-584868907b9c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:32:41 crc kubenswrapper[4665]: I1205 01:32:41.343466 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-config-data" (OuterVolumeSpecName: "config-data") pod "329fcb4a-b83f-4831-989a-584868907b9c" (UID: "329fcb4a-b83f-4831-989a-584868907b9c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:32:41 crc kubenswrapper[4665]: I1205 01:32:41.353839 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cgbbd\" (UniqueName: \"kubernetes.io/projected/329fcb4a-b83f-4831-989a-584868907b9c-kube-api-access-cgbbd\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:41 crc kubenswrapper[4665]: I1205 01:32:41.353870 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:41 crc kubenswrapper[4665]: I1205 01:32:41.353880 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:41 crc kubenswrapper[4665]: I1205 01:32:41.353888 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:41 crc kubenswrapper[4665]: I1205 01:32:41.353897 4665 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:41 crc kubenswrapper[4665]: I1205 01:32:41.353904 4665 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/329fcb4a-b83f-4831-989a-584868907b9c-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:41 crc kubenswrapper[4665]: I1205 01:32:41.854732 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-7c79d9c44-5ps46"] Dec 05 01:32:41 crc kubenswrapper[4665]: W1205 01:32:41.869625 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod52283875_2314_426a_b5ff_77a8b000f4cc.slice/crio-556982a2fc891b7b4279f83df48bc6673a6c1e4cc600974991dc8ad8ea385e80 WatchSource:0}: Error finding container 556982a2fc891b7b4279f83df48bc6673a6c1e4cc600974991dc8ad8ea385e80: Status 404 returned error can't find the container with id 556982a2fc891b7b4279f83df48bc6673a6c1e4cc600974991dc8ad8ea385e80 Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.236594 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-79f86c7bd7-c4mss"] Dec 05 01:32:42 crc kubenswrapper[4665]: E1205 01:32:42.281041 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="329fcb4a-b83f-4831-989a-584868907b9c" containerName="keystone-bootstrap" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.281085 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="329fcb4a-b83f-4831-989a-584868907b9c" containerName="keystone-bootstrap" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.281407 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="329fcb4a-b83f-4831-989a-584868907b9c" containerName="keystone-bootstrap" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.281987 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-79f86c7bd7-c4mss"] Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.282086 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.286583 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.286870 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.287027 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.287216 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.287528 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.287651 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-6ssd4" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.352200 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-86cd4c9876-glfvx" podUID="2178a916-adc5-4ff5-8972-30b105320f5f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.148:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.148:8443: connect: connection refused" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.388518 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7c79d9c44-5ps46" event={"ID":"52283875-2314-426a-b5ff-77a8b000f4cc","Type":"ContainerStarted","Data":"556982a2fc891b7b4279f83df48bc6673a6c1e4cc600974991dc8ad8ea385e80"} Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.389610 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d1f4f51-e293-418f-a305-a7699a6cb866-combined-ca-bundle\") pod \"keystone-79f86c7bd7-c4mss\" (UID: \"4d1f4f51-e293-418f-a305-a7699a6cb866\") " pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.389707 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d1f4f51-e293-418f-a305-a7699a6cb866-scripts\") pod \"keystone-79f86c7bd7-c4mss\" (UID: \"4d1f4f51-e293-418f-a305-a7699a6cb866\") " pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.389795 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d1f4f51-e293-418f-a305-a7699a6cb866-internal-tls-certs\") pod \"keystone-79f86c7bd7-c4mss\" (UID: \"4d1f4f51-e293-418f-a305-a7699a6cb866\") " pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.389871 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bscqg\" (UniqueName: \"kubernetes.io/projected/4d1f4f51-e293-418f-a305-a7699a6cb866-kube-api-access-bscqg\") pod \"keystone-79f86c7bd7-c4mss\" (UID: \"4d1f4f51-e293-418f-a305-a7699a6cb866\") " pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.389945 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d1f4f51-e293-418f-a305-a7699a6cb866-public-tls-certs\") pod \"keystone-79f86c7bd7-c4mss\" (UID: \"4d1f4f51-e293-418f-a305-a7699a6cb866\") " pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.390027 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4d1f4f51-e293-418f-a305-a7699a6cb866-credential-keys\") pod \"keystone-79f86c7bd7-c4mss\" (UID: \"4d1f4f51-e293-418f-a305-a7699a6cb866\") " pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.390131 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d1f4f51-e293-418f-a305-a7699a6cb866-config-data\") pod \"keystone-79f86c7bd7-c4mss\" (UID: \"4d1f4f51-e293-418f-a305-a7699a6cb866\") " pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.390229 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4d1f4f51-e293-418f-a305-a7699a6cb866-fernet-keys\") pod \"keystone-79f86c7bd7-c4mss\" (UID: \"4d1f4f51-e293-418f-a305-a7699a6cb866\") " pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.421552 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b995216-a8c4-418c-9a82-eff79ca5360c","Type":"ContainerStarted","Data":"4beef3c7d2d461d99f18a2ec654e3e26b1e5f78b1a90255d2bd4ba7feee12382"} Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.438869 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-267l7" event={"ID":"72b6fbf7-1bb1-45c3-97a1-61da90bd1a92","Type":"ContainerStarted","Data":"4a837b7b6687971d541d6c711a9db225e2fb3d0353b364e895c3398c57d1df65"} Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.484865 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-267l7" podStartSLOduration=4.192947155 podStartE2EDuration="1m4.484845904s" podCreationTimestamp="2025-12-05 01:31:38 +0000 UTC" firstStartedPulling="2025-12-05 01:31:40.954996473 +0000 UTC m=+1276.294388782" lastFinishedPulling="2025-12-05 01:32:41.246895232 +0000 UTC m=+1336.586287531" observedRunningTime="2025-12-05 01:32:42.469797383 +0000 UTC m=+1337.809189682" watchObservedRunningTime="2025-12-05 01:32:42.484845904 +0000 UTC m=+1337.824238203" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.489456 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-644f785f4-mslbg" podUID="64407a72-3fdf-450f-b5c0-913ee74bb437" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.149:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.149:8443: connect: connection refused" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.494325 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d1f4f51-e293-418f-a305-a7699a6cb866-config-data\") pod \"keystone-79f86c7bd7-c4mss\" (UID: \"4d1f4f51-e293-418f-a305-a7699a6cb866\") " pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.494633 4665 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4d1f4f51-e293-418f-a305-a7699a6cb866-fernet-keys\") pod \"keystone-79f86c7bd7-c4mss\" (UID: \"4d1f4f51-e293-418f-a305-a7699a6cb866\") " pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.494808 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d1f4f51-e293-418f-a305-a7699a6cb866-combined-ca-bundle\") pod \"keystone-79f86c7bd7-c4mss\" (UID: \"4d1f4f51-e293-418f-a305-a7699a6cb866\") " pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.494910 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d1f4f51-e293-418f-a305-a7699a6cb866-scripts\") pod \"keystone-79f86c7bd7-c4mss\" (UID: \"4d1f4f51-e293-418f-a305-a7699a6cb866\") " pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.495035 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d1f4f51-e293-418f-a305-a7699a6cb866-internal-tls-certs\") pod \"keystone-79f86c7bd7-c4mss\" (UID: \"4d1f4f51-e293-418f-a305-a7699a6cb866\") " pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.495142 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bscqg\" (UniqueName: \"kubernetes.io/projected/4d1f4f51-e293-418f-a305-a7699a6cb866-kube-api-access-bscqg\") pod \"keystone-79f86c7bd7-c4mss\" (UID: \"4d1f4f51-e293-418f-a305-a7699a6cb866\") " pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.495234 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d1f4f51-e293-418f-a305-a7699a6cb866-public-tls-certs\") pod \"keystone-79f86c7bd7-c4mss\" (UID: \"4d1f4f51-e293-418f-a305-a7699a6cb866\") " pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.496141 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4d1f4f51-e293-418f-a305-a7699a6cb866-credential-keys\") pod \"keystone-79f86c7bd7-c4mss\" (UID: \"4d1f4f51-e293-418f-a305-a7699a6cb866\") " pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.511111 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4d1f4f51-e293-418f-a305-a7699a6cb866-fernet-keys\") pod \"keystone-79f86c7bd7-c4mss\" (UID: \"4d1f4f51-e293-418f-a305-a7699a6cb866\") " pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.512861 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4d1f4f51-e293-418f-a305-a7699a6cb866-scripts\") pod \"keystone-79f86c7bd7-c4mss\" (UID: \"4d1f4f51-e293-418f-a305-a7699a6cb866\") " pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.520927 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d1f4f51-e293-418f-a305-a7699a6cb866-internal-tls-certs\") pod 
\"keystone-79f86c7bd7-c4mss\" (UID: \"4d1f4f51-e293-418f-a305-a7699a6cb866\") " pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.534649 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4d1f4f51-e293-418f-a305-a7699a6cb866-public-tls-certs\") pod \"keystone-79f86c7bd7-c4mss\" (UID: \"4d1f4f51-e293-418f-a305-a7699a6cb866\") " pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.534914 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4d1f4f51-e293-418f-a305-a7699a6cb866-credential-keys\") pod \"keystone-79f86c7bd7-c4mss\" (UID: \"4d1f4f51-e293-418f-a305-a7699a6cb866\") " pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.547133 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4d1f4f51-e293-418f-a305-a7699a6cb866-config-data\") pod \"keystone-79f86c7bd7-c4mss\" (UID: \"4d1f4f51-e293-418f-a305-a7699a6cb866\") " pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.547853 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bscqg\" (UniqueName: \"kubernetes.io/projected/4d1f4f51-e293-418f-a305-a7699a6cb866-kube-api-access-bscqg\") pod \"keystone-79f86c7bd7-c4mss\" (UID: \"4d1f4f51-e293-418f-a305-a7699a6cb866\") " pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.547905 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4d1f4f51-e293-418f-a305-a7699a6cb866-combined-ca-bundle\") pod \"keystone-79f86c7bd7-c4mss\" (UID: \"4d1f4f51-e293-418f-a305-a7699a6cb866\") " pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:42 crc kubenswrapper[4665]: I1205 01:32:42.633257 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:43 crc kubenswrapper[4665]: I1205 01:32:43.143962 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-79f86c7bd7-c4mss"] Dec 05 01:32:43 crc kubenswrapper[4665]: I1205 01:32:43.459251 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-79f86c7bd7-c4mss" event={"ID":"4d1f4f51-e293-418f-a305-a7699a6cb866","Type":"ContainerStarted","Data":"c13c4ed284b259491c0b4e1c855ef62af9998796dd823910a3e696e55367760f"} Dec 05 01:32:43 crc kubenswrapper[4665]: I1205 01:32:43.470228 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-rkvs6" event={"ID":"f2a58335-982b-42ff-933c-f93d38fbb197","Type":"ContainerStarted","Data":"5fef24f589e5224b60fc5340c948e406a3d50e59590c069b578fed13877eabc9"} Dec 05 01:32:43 crc kubenswrapper[4665]: I1205 01:32:43.481658 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7c79d9c44-5ps46" event={"ID":"52283875-2314-426a-b5ff-77a8b000f4cc","Type":"ContainerStarted","Data":"a7654e6e91075c3e39b40b01e79d934b06660ddbd6bee1d297fe27b3717ce47c"} Dec 05 01:32:43 crc kubenswrapper[4665]: I1205 01:32:43.481716 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-7c79d9c44-5ps46" event={"ID":"52283875-2314-426a-b5ff-77a8b000f4cc","Type":"ContainerStarted","Data":"ee449a4b5124f368dd07e30adbbb1f6ee98c66e8e64f6fbd400f7d0058d6c423"} Dec 05 01:32:43 crc kubenswrapper[4665]: I1205 01:32:43.481680 4665 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 01:32:43 crc kubenswrapper[4665]: I1205 01:32:43.481744 4665 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 01:32:43 crc kubenswrapper[4665]: I1205 01:32:43.494479 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-rkvs6" podStartSLOduration=4.649225919 podStartE2EDuration="1m5.494460035s" podCreationTimestamp="2025-12-05 01:31:38 +0000 UTC" firstStartedPulling="2025-12-05 01:31:40.387227539 +0000 UTC m=+1275.726619838" lastFinishedPulling="2025-12-05 01:32:41.232461655 +0000 UTC m=+1336.571853954" observedRunningTime="2025-12-05 01:32:43.491424281 +0000 UTC m=+1338.830816580" watchObservedRunningTime="2025-12-05 01:32:43.494460035 +0000 UTC m=+1338.833852344" Dec 05 01:32:43 crc kubenswrapper[4665]: I1205 01:32:43.518845 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-7c79d9c44-5ps46" podStartSLOduration=10.518825929 podStartE2EDuration="10.518825929s" podCreationTimestamp="2025-12-05 01:32:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:32:43.51635355 +0000 UTC m=+1338.855745849" watchObservedRunningTime="2025-12-05 01:32:43.518825929 +0000 UTC m=+1338.858218228" Dec 05 01:32:44 crc kubenswrapper[4665]: I1205 01:32:44.504823 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-79f86c7bd7-c4mss" event={"ID":"4d1f4f51-e293-418f-a305-a7699a6cb866","Type":"ContainerStarted","Data":"fa3e72044769411ae8780afad694fa478900bfec9b5ed2b2d1e54a6539512c5f"} Dec 05 01:32:44 crc kubenswrapper[4665]: I1205 01:32:44.505249 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:32:44 crc kubenswrapper[4665]: I1205 01:32:44.505262 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:32:44 crc kubenswrapper[4665]: I1205 01:32:44.532612 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-79f86c7bd7-c4mss" podStartSLOduration=2.532596979 podStartE2EDuration="2.532596979s" podCreationTimestamp="2025-12-05 01:32:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:32:44.528142322 +0000 UTC m=+1339.867534621" watchObservedRunningTime="2025-12-05 01:32:44.532596979 +0000 UTC m=+1339.871989278" Dec 05 01:32:45 crc kubenswrapper[4665]: I1205 01:32:45.517854 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:32:45 crc kubenswrapper[4665]: I1205 01:32:45.574032 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 01:32:45 crc kubenswrapper[4665]: I1205 01:32:45.574142 4665 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 01:32:46 crc kubenswrapper[4665]: I1205 01:32:46.299469 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 01:32:48 crc kubenswrapper[4665]: I1205 01:32:48.546923 4665 generic.go:334] "Generic (PLEG): container finished" podID="72b6fbf7-1bb1-45c3-97a1-61da90bd1a92" containerID="4a837b7b6687971d541d6c711a9db225e2fb3d0353b364e895c3398c57d1df65" exitCode=0 Dec 05 01:32:48 crc kubenswrapper[4665]: I1205 01:32:48.546998 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-267l7" event={"ID":"72b6fbf7-1bb1-45c3-97a1-61da90bd1a92","Type":"ContainerDied","Data":"4a837b7b6687971d541d6c711a9db225e2fb3d0353b364e895c3398c57d1df65"} Dec 05 01:32:50 crc kubenswrapper[4665]: I1205 01:32:50.859528 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-7c5886d9b4-c8rm2" Dec 05 01:32:51 crc kubenswrapper[4665]: I1205 01:32:51.597922 4665 generic.go:334] "Generic (PLEG): container finished" podID="f2a58335-982b-42ff-933c-f93d38fbb197" containerID="5fef24f589e5224b60fc5340c948e406a3d50e59590c069b578fed13877eabc9" exitCode=0 Dec 05 01:32:51 crc kubenswrapper[4665]: I1205 01:32:51.597964 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-rkvs6" event={"ID":"f2a58335-982b-42ff-933c-f93d38fbb197","Type":"ContainerDied","Data":"5fef24f589e5224b60fc5340c948e406a3d50e59590c069b578fed13877eabc9"} Dec 05 01:32:52 crc kubenswrapper[4665]: I1205 01:32:52.270771 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-db-sync-267l7" Dec 05 01:32:52 crc kubenswrapper[4665]: I1205 01:32:52.344920 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-86cd4c9876-glfvx" podUID="2178a916-adc5-4ff5-8972-30b105320f5f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.148:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.148:8443: connect: connection refused" Dec 05 01:32:52 crc kubenswrapper[4665]: I1205 01:32:52.415283 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72b6fbf7-1bb1-45c3-97a1-61da90bd1a92-combined-ca-bundle\") pod \"72b6fbf7-1bb1-45c3-97a1-61da90bd1a92\" (UID: \"72b6fbf7-1bb1-45c3-97a1-61da90bd1a92\") " Dec 05 01:32:52 crc kubenswrapper[4665]: I1205 01:32:52.415474 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/72b6fbf7-1bb1-45c3-97a1-61da90bd1a92-db-sync-config-data\") pod \"72b6fbf7-1bb1-45c3-97a1-61da90bd1a92\" (UID: \"72b6fbf7-1bb1-45c3-97a1-61da90bd1a92\") " Dec 05 01:32:52 crc kubenswrapper[4665]: I1205 01:32:52.415527 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vnwsx\" (UniqueName: \"kubernetes.io/projected/72b6fbf7-1bb1-45c3-97a1-61da90bd1a92-kube-api-access-vnwsx\") pod \"72b6fbf7-1bb1-45c3-97a1-61da90bd1a92\" (UID: \"72b6fbf7-1bb1-45c3-97a1-61da90bd1a92\") " Dec 05 01:32:52 crc kubenswrapper[4665]: I1205 01:32:52.421492 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72b6fbf7-1bb1-45c3-97a1-61da90bd1a92-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "72b6fbf7-1bb1-45c3-97a1-61da90bd1a92" (UID: "72b6fbf7-1bb1-45c3-97a1-61da90bd1a92"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:32:52 crc kubenswrapper[4665]: I1205 01:32:52.421753 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/72b6fbf7-1bb1-45c3-97a1-61da90bd1a92-kube-api-access-vnwsx" (OuterVolumeSpecName: "kube-api-access-vnwsx") pod "72b6fbf7-1bb1-45c3-97a1-61da90bd1a92" (UID: "72b6fbf7-1bb1-45c3-97a1-61da90bd1a92"). InnerVolumeSpecName "kube-api-access-vnwsx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:32:52 crc kubenswrapper[4665]: I1205 01:32:52.441116 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/72b6fbf7-1bb1-45c3-97a1-61da90bd1a92-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "72b6fbf7-1bb1-45c3-97a1-61da90bd1a92" (UID: "72b6fbf7-1bb1-45c3-97a1-61da90bd1a92"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:32:52 crc kubenswrapper[4665]: I1205 01:32:52.483894 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-644f785f4-mslbg" podUID="64407a72-3fdf-450f-b5c0-913ee74bb437" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.149:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.149:8443: connect: connection refused" Dec 05 01:32:52 crc kubenswrapper[4665]: I1205 01:32:52.517204 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/72b6fbf7-1bb1-45c3-97a1-61da90bd1a92-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:52 crc kubenswrapper[4665]: I1205 01:32:52.517255 4665 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/72b6fbf7-1bb1-45c3-97a1-61da90bd1a92-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:52 crc kubenswrapper[4665]: I1205 01:32:52.517266 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vnwsx\" (UniqueName: \"kubernetes.io/projected/72b6fbf7-1bb1-45c3-97a1-61da90bd1a92-kube-api-access-vnwsx\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:52 crc kubenswrapper[4665]: I1205 01:32:52.609722 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-267l7" Dec 05 01:32:52 crc kubenswrapper[4665]: I1205 01:32:52.609714 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-267l7" event={"ID":"72b6fbf7-1bb1-45c3-97a1-61da90bd1a92","Type":"ContainerDied","Data":"5bf65a4c44acd7eb92d14e2196872128112c35ad8a18a3d242e49d9e35964535"} Dec 05 01:32:52 crc kubenswrapper[4665]: I1205 01:32:52.609788 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5bf65a4c44acd7eb92d14e2196872128112c35ad8a18a3d242e49d9e35964535" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.637615 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-5bf7b484d7-782wq"] Dec 05 01:32:53 crc kubenswrapper[4665]: E1205 01:32:53.638254 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="72b6fbf7-1bb1-45c3-97a1-61da90bd1a92" containerName="barbican-db-sync" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.638266 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="72b6fbf7-1bb1-45c3-97a1-61da90bd1a92" containerName="barbican-db-sync" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.638471 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="72b6fbf7-1bb1-45c3-97a1-61da90bd1a92" containerName="barbican-db-sync" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.640384 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-worker-5bf7b484d7-782wq" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.649955 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dd95469-e581-46aa-bbb2-c69214aa26c7-combined-ca-bundle\") pod \"barbican-worker-5bf7b484d7-782wq\" (UID: \"9dd95469-e581-46aa-bbb2-c69214aa26c7\") " pod="openstack/barbican-worker-5bf7b484d7-782wq" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.650010 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qpb9z\" (UniqueName: \"kubernetes.io/projected/9dd95469-e581-46aa-bbb2-c69214aa26c7-kube-api-access-qpb9z\") pod \"barbican-worker-5bf7b484d7-782wq\" (UID: \"9dd95469-e581-46aa-bbb2-c69214aa26c7\") " pod="openstack/barbican-worker-5bf7b484d7-782wq" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.650046 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9dd95469-e581-46aa-bbb2-c69214aa26c7-config-data\") pod \"barbican-worker-5bf7b484d7-782wq\" (UID: \"9dd95469-e581-46aa-bbb2-c69214aa26c7\") " pod="openstack/barbican-worker-5bf7b484d7-782wq" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.650076 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9dd95469-e581-46aa-bbb2-c69214aa26c7-config-data-custom\") pod \"barbican-worker-5bf7b484d7-782wq\" (UID: \"9dd95469-e581-46aa-bbb2-c69214aa26c7\") " pod="openstack/barbican-worker-5bf7b484d7-782wq" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.650105 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9dd95469-e581-46aa-bbb2-c69214aa26c7-logs\") pod \"barbican-worker-5bf7b484d7-782wq\" (UID: \"9dd95469-e581-46aa-bbb2-c69214aa26c7\") " pod="openstack/barbican-worker-5bf7b484d7-782wq" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.656872 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.657063 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-44zp6" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.659248 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.681574 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5bf7b484d7-782wq"] Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.691196 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-rkvs6" event={"ID":"f2a58335-982b-42ff-933c-f93d38fbb197","Type":"ContainerDied","Data":"32671ec205d89a6ecbd15bb1aab71dd1f410bb3c2ab3195e0c53e89a3e9c3669"} Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.691231 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="32671ec205d89a6ecbd15bb1aab71dd1f410bb3c2ab3195e0c53e89a3e9c3669" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.692426 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-rkvs6" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.773550 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9dd95469-e581-46aa-bbb2-c69214aa26c7-config-data\") pod \"barbican-worker-5bf7b484d7-782wq\" (UID: \"9dd95469-e581-46aa-bbb2-c69214aa26c7\") " pod="openstack/barbican-worker-5bf7b484d7-782wq" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.773808 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9dd95469-e581-46aa-bbb2-c69214aa26c7-config-data-custom\") pod \"barbican-worker-5bf7b484d7-782wq\" (UID: \"9dd95469-e581-46aa-bbb2-c69214aa26c7\") " pod="openstack/barbican-worker-5bf7b484d7-782wq" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.773958 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9dd95469-e581-46aa-bbb2-c69214aa26c7-logs\") pod \"barbican-worker-5bf7b484d7-782wq\" (UID: \"9dd95469-e581-46aa-bbb2-c69214aa26c7\") " pod="openstack/barbican-worker-5bf7b484d7-782wq" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.774265 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dd95469-e581-46aa-bbb2-c69214aa26c7-combined-ca-bundle\") pod \"barbican-worker-5bf7b484d7-782wq\" (UID: \"9dd95469-e581-46aa-bbb2-c69214aa26c7\") " pod="openstack/barbican-worker-5bf7b484d7-782wq" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.774406 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qpb9z\" (UniqueName: \"kubernetes.io/projected/9dd95469-e581-46aa-bbb2-c69214aa26c7-kube-api-access-qpb9z\") pod \"barbican-worker-5bf7b484d7-782wq\" (UID: \"9dd95469-e581-46aa-bbb2-c69214aa26c7\") " pod="openstack/barbican-worker-5bf7b484d7-782wq" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.776288 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-d6596b4bb-7zqjr"] Dec 05 01:32:53 crc kubenswrapper[4665]: E1205 01:32:53.777481 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2a58335-982b-42ff-933c-f93d38fbb197" containerName="cinder-db-sync" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.777558 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2a58335-982b-42ff-933c-f93d38fbb197" containerName="cinder-db-sync" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.778027 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2a58335-982b-42ff-933c-f93d38fbb197" containerName="cinder-db-sync" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.806400 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9dd95469-e581-46aa-bbb2-c69214aa26c7-logs\") pod \"barbican-worker-5bf7b484d7-782wq\" (UID: \"9dd95469-e581-46aa-bbb2-c69214aa26c7\") " pod="openstack/barbican-worker-5bf7b484d7-782wq" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.807889 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9dd95469-e581-46aa-bbb2-c69214aa26c7-config-data\") pod \"barbican-worker-5bf7b484d7-782wq\" (UID: \"9dd95469-e581-46aa-bbb2-c69214aa26c7\") " 
pod="openstack/barbican-worker-5bf7b484d7-782wq" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.821228 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9dd95469-e581-46aa-bbb2-c69214aa26c7-combined-ca-bundle\") pod \"barbican-worker-5bf7b484d7-782wq\" (UID: \"9dd95469-e581-46aa-bbb2-c69214aa26c7\") " pod="openstack/barbican-worker-5bf7b484d7-782wq" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.854867 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9dd95469-e581-46aa-bbb2-c69214aa26c7-config-data-custom\") pod \"barbican-worker-5bf7b484d7-782wq\" (UID: \"9dd95469-e581-46aa-bbb2-c69214aa26c7\") " pod="openstack/barbican-worker-5bf7b484d7-782wq" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.855337 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qpb9z\" (UniqueName: \"kubernetes.io/projected/9dd95469-e581-46aa-bbb2-c69214aa26c7-kube-api-access-qpb9z\") pod \"barbican-worker-5bf7b484d7-782wq\" (UID: \"9dd95469-e581-46aa-bbb2-c69214aa26c7\") " pod="openstack/barbican-worker-5bf7b484d7-782wq" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.884615 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2a58335-982b-42ff-933c-f93d38fbb197-combined-ca-bundle\") pod \"f2a58335-982b-42ff-933c-f93d38fbb197\" (UID: \"f2a58335-982b-42ff-933c-f93d38fbb197\") " Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.884669 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f2a58335-982b-42ff-933c-f93d38fbb197-db-sync-config-data\") pod \"f2a58335-982b-42ff-933c-f93d38fbb197\" (UID: \"f2a58335-982b-42ff-933c-f93d38fbb197\") " Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.884766 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2a58335-982b-42ff-933c-f93d38fbb197-scripts\") pod \"f2a58335-982b-42ff-933c-f93d38fbb197\" (UID: \"f2a58335-982b-42ff-933c-f93d38fbb197\") " Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.884884 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2a58335-982b-42ff-933c-f93d38fbb197-config-data\") pod \"f2a58335-982b-42ff-933c-f93d38fbb197\" (UID: \"f2a58335-982b-42ff-933c-f93d38fbb197\") " Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.884925 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f2a58335-982b-42ff-933c-f93d38fbb197-etc-machine-id\") pod \"f2a58335-982b-42ff-933c-f93d38fbb197\" (UID: \"f2a58335-982b-42ff-933c-f93d38fbb197\") " Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.884946 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dm6fs\" (UniqueName: \"kubernetes.io/projected/f2a58335-982b-42ff-933c-f93d38fbb197-kube-api-access-dm6fs\") pod \"f2a58335-982b-42ff-933c-f93d38fbb197\" (UID: \"f2a58335-982b-42ff-933c-f93d38fbb197\") " Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.897367 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/host-path/f2a58335-982b-42ff-933c-f93d38fbb197-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "f2a58335-982b-42ff-933c-f93d38fbb197" (UID: "f2a58335-982b-42ff-933c-f93d38fbb197"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.963016 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2a58335-982b-42ff-933c-f93d38fbb197-kube-api-access-dm6fs" (OuterVolumeSpecName: "kube-api-access-dm6fs") pod "f2a58335-982b-42ff-933c-f93d38fbb197" (UID: "f2a58335-982b-42ff-933c-f93d38fbb197"). InnerVolumeSpecName "kube-api-access-dm6fs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.963383 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2a58335-982b-42ff-933c-f93d38fbb197-scripts" (OuterVolumeSpecName: "scripts") pod "f2a58335-982b-42ff-933c-f93d38fbb197" (UID: "f2a58335-982b-42ff-933c-f93d38fbb197"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:32:53 crc kubenswrapper[4665]: I1205 01:32:53.980526 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2a58335-982b-42ff-933c-f93d38fbb197-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "f2a58335-982b-42ff-933c-f93d38fbb197" (UID: "f2a58335-982b-42ff-933c-f93d38fbb197"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.015180 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-d6596b4bb-7zqjr"] Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.025380 4665 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/f2a58335-982b-42ff-933c-f93d38fbb197-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.025410 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dm6fs\" (UniqueName: \"kubernetes.io/projected/f2a58335-982b-42ff-933c-f93d38fbb197-kube-api-access-dm6fs\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.025421 4665 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/f2a58335-982b-42ff-933c-f93d38fbb197-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.025429 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f2a58335-982b-42ff-933c-f93d38fbb197-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.026155 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-d6596b4bb-7zqjr" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.037981 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-5bf7b484d7-782wq" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.044609 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-vmbzv"] Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.046096 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-vmbzv" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.056850 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.081032 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-vmbzv"] Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.101464 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2a58335-982b-42ff-933c-f93d38fbb197-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f2a58335-982b-42ff-933c-f93d38fbb197" (UID: "f2a58335-982b-42ff-933c-f93d38fbb197"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.127096 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6cf44f27-5007-4db9-8784-715bdef486a0-config-data\") pod \"barbican-keystone-listener-d6596b4bb-7zqjr\" (UID: \"6cf44f27-5007-4db9-8784-715bdef486a0\") " pod="openstack/barbican-keystone-listener-d6596b4bb-7zqjr" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.127147 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-config\") pod \"dnsmasq-dns-85ff748b95-vmbzv\" (UID: \"f722b6aa-393f-41b5-a8f2-d69db634ea70\") " pod="openstack/dnsmasq-dns-85ff748b95-vmbzv" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.127180 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6cf44f27-5007-4db9-8784-715bdef486a0-logs\") pod \"barbican-keystone-listener-d6596b4bb-7zqjr\" (UID: \"6cf44f27-5007-4db9-8784-715bdef486a0\") " pod="openstack/barbican-keystone-listener-d6596b4bb-7zqjr" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.127194 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6cf44f27-5007-4db9-8784-715bdef486a0-combined-ca-bundle\") pod \"barbican-keystone-listener-d6596b4bb-7zqjr\" (UID: \"6cf44f27-5007-4db9-8784-715bdef486a0\") " pod="openstack/barbican-keystone-listener-d6596b4bb-7zqjr" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.127216 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-vmbzv\" (UID: \"f722b6aa-393f-41b5-a8f2-d69db634ea70\") " pod="openstack/dnsmasq-dns-85ff748b95-vmbzv" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.127232 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-vmbzv\" (UID: \"f722b6aa-393f-41b5-a8f2-d69db634ea70\") " pod="openstack/dnsmasq-dns-85ff748b95-vmbzv" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.127270 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kj9bk\" 
(UniqueName: \"kubernetes.io/projected/6cf44f27-5007-4db9-8784-715bdef486a0-kube-api-access-kj9bk\") pod \"barbican-keystone-listener-d6596b4bb-7zqjr\" (UID: \"6cf44f27-5007-4db9-8784-715bdef486a0\") " pod="openstack/barbican-keystone-listener-d6596b4bb-7zqjr" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.127305 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-vmbzv\" (UID: \"f722b6aa-393f-41b5-a8f2-d69db634ea70\") " pod="openstack/dnsmasq-dns-85ff748b95-vmbzv" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.127371 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6cf44f27-5007-4db9-8784-715bdef486a0-config-data-custom\") pod \"barbican-keystone-listener-d6596b4bb-7zqjr\" (UID: \"6cf44f27-5007-4db9-8784-715bdef486a0\") " pod="openstack/barbican-keystone-listener-d6596b4bb-7zqjr" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.127391 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhs4l\" (UniqueName: \"kubernetes.io/projected/f722b6aa-393f-41b5-a8f2-d69db634ea70-kube-api-access-rhs4l\") pod \"dnsmasq-dns-85ff748b95-vmbzv\" (UID: \"f722b6aa-393f-41b5-a8f2-d69db634ea70\") " pod="openstack/dnsmasq-dns-85ff748b95-vmbzv" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.127431 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-dns-svc\") pod \"dnsmasq-dns-85ff748b95-vmbzv\" (UID: \"f722b6aa-393f-41b5-a8f2-d69db634ea70\") " pod="openstack/dnsmasq-dns-85ff748b95-vmbzv" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.127469 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f2a58335-982b-42ff-933c-f93d38fbb197-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.177878 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2a58335-982b-42ff-933c-f93d38fbb197-config-data" (OuterVolumeSpecName: "config-data") pod "f2a58335-982b-42ff-933c-f93d38fbb197" (UID: "f2a58335-982b-42ff-933c-f93d38fbb197"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.229276 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6cf44f27-5007-4db9-8784-715bdef486a0-config-data\") pod \"barbican-keystone-listener-d6596b4bb-7zqjr\" (UID: \"6cf44f27-5007-4db9-8784-715bdef486a0\") " pod="openstack/barbican-keystone-listener-d6596b4bb-7zqjr" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.229419 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-config\") pod \"dnsmasq-dns-85ff748b95-vmbzv\" (UID: \"f722b6aa-393f-41b5-a8f2-d69db634ea70\") " pod="openstack/dnsmasq-dns-85ff748b95-vmbzv" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.229453 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6cf44f27-5007-4db9-8784-715bdef486a0-logs\") pod \"barbican-keystone-listener-d6596b4bb-7zqjr\" (UID: \"6cf44f27-5007-4db9-8784-715bdef486a0\") " pod="openstack/barbican-keystone-listener-d6596b4bb-7zqjr" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.229470 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6cf44f27-5007-4db9-8784-715bdef486a0-combined-ca-bundle\") pod \"barbican-keystone-listener-d6596b4bb-7zqjr\" (UID: \"6cf44f27-5007-4db9-8784-715bdef486a0\") " pod="openstack/barbican-keystone-listener-d6596b4bb-7zqjr" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.229496 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-vmbzv\" (UID: \"f722b6aa-393f-41b5-a8f2-d69db634ea70\") " pod="openstack/dnsmasq-dns-85ff748b95-vmbzv" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.229517 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-vmbzv\" (UID: \"f722b6aa-393f-41b5-a8f2-d69db634ea70\") " pod="openstack/dnsmasq-dns-85ff748b95-vmbzv" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.229557 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kj9bk\" (UniqueName: \"kubernetes.io/projected/6cf44f27-5007-4db9-8784-715bdef486a0-kube-api-access-kj9bk\") pod \"barbican-keystone-listener-d6596b4bb-7zqjr\" (UID: \"6cf44f27-5007-4db9-8784-715bdef486a0\") " pod="openstack/barbican-keystone-listener-d6596b4bb-7zqjr" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.229579 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-vmbzv\" (UID: \"f722b6aa-393f-41b5-a8f2-d69db634ea70\") " pod="openstack/dnsmasq-dns-85ff748b95-vmbzv" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.229635 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6cf44f27-5007-4db9-8784-715bdef486a0-config-data-custom\") pod 
\"barbican-keystone-listener-d6596b4bb-7zqjr\" (UID: \"6cf44f27-5007-4db9-8784-715bdef486a0\") " pod="openstack/barbican-keystone-listener-d6596b4bb-7zqjr" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.229660 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhs4l\" (UniqueName: \"kubernetes.io/projected/f722b6aa-393f-41b5-a8f2-d69db634ea70-kube-api-access-rhs4l\") pod \"dnsmasq-dns-85ff748b95-vmbzv\" (UID: \"f722b6aa-393f-41b5-a8f2-d69db634ea70\") " pod="openstack/dnsmasq-dns-85ff748b95-vmbzv" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.229691 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-dns-svc\") pod \"dnsmasq-dns-85ff748b95-vmbzv\" (UID: \"f722b6aa-393f-41b5-a8f2-d69db634ea70\") " pod="openstack/dnsmasq-dns-85ff748b95-vmbzv" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.229868 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f2a58335-982b-42ff-933c-f93d38fbb197-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.231282 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-ovsdbserver-nb\") pod \"dnsmasq-dns-85ff748b95-vmbzv\" (UID: \"f722b6aa-393f-41b5-a8f2-d69db634ea70\") " pod="openstack/dnsmasq-dns-85ff748b95-vmbzv" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.233379 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-dns-swift-storage-0\") pod \"dnsmasq-dns-85ff748b95-vmbzv\" (UID: \"f722b6aa-393f-41b5-a8f2-d69db634ea70\") " pod="openstack/dnsmasq-dns-85ff748b95-vmbzv" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.233903 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-ovsdbserver-sb\") pod \"dnsmasq-dns-85ff748b95-vmbzv\" (UID: \"f722b6aa-393f-41b5-a8f2-d69db634ea70\") " pod="openstack/dnsmasq-dns-85ff748b95-vmbzv" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.234425 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6cf44f27-5007-4db9-8784-715bdef486a0-logs\") pod \"barbican-keystone-listener-d6596b4bb-7zqjr\" (UID: \"6cf44f27-5007-4db9-8784-715bdef486a0\") " pod="openstack/barbican-keystone-listener-d6596b4bb-7zqjr" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.235063 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-config\") pod \"dnsmasq-dns-85ff748b95-vmbzv\" (UID: \"f722b6aa-393f-41b5-a8f2-d69db634ea70\") " pod="openstack/dnsmasq-dns-85ff748b95-vmbzv" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.237981 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-dns-svc\") pod \"dnsmasq-dns-85ff748b95-vmbzv\" (UID: \"f722b6aa-393f-41b5-a8f2-d69db634ea70\") " pod="openstack/dnsmasq-dns-85ff748b95-vmbzv" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.238266 4665 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6cf44f27-5007-4db9-8784-715bdef486a0-combined-ca-bundle\") pod \"barbican-keystone-listener-d6596b4bb-7zqjr\" (UID: \"6cf44f27-5007-4db9-8784-715bdef486a0\") " pod="openstack/barbican-keystone-listener-d6596b4bb-7zqjr" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.245131 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6cf44f27-5007-4db9-8784-715bdef486a0-config-data\") pod \"barbican-keystone-listener-d6596b4bb-7zqjr\" (UID: \"6cf44f27-5007-4db9-8784-715bdef486a0\") " pod="openstack/barbican-keystone-listener-d6596b4bb-7zqjr" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.251891 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/6cf44f27-5007-4db9-8784-715bdef486a0-config-data-custom\") pod \"barbican-keystone-listener-d6596b4bb-7zqjr\" (UID: \"6cf44f27-5007-4db9-8784-715bdef486a0\") " pod="openstack/barbican-keystone-listener-d6596b4bb-7zqjr" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.259170 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhs4l\" (UniqueName: \"kubernetes.io/projected/f722b6aa-393f-41b5-a8f2-d69db634ea70-kube-api-access-rhs4l\") pod \"dnsmasq-dns-85ff748b95-vmbzv\" (UID: \"f722b6aa-393f-41b5-a8f2-d69db634ea70\") " pod="openstack/dnsmasq-dns-85ff748b95-vmbzv" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.308897 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kj9bk\" (UniqueName: \"kubernetes.io/projected/6cf44f27-5007-4db9-8784-715bdef486a0-kube-api-access-kj9bk\") pod \"barbican-keystone-listener-d6596b4bb-7zqjr\" (UID: \"6cf44f27-5007-4db9-8784-715bdef486a0\") " pod="openstack/barbican-keystone-listener-d6596b4bb-7zqjr" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.347629 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-5b484f7985-8qkjq" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.418034 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-d6596b4bb-7zqjr" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.450756 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-vmbzv" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.464499 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7c5886d9b4-c8rm2"] Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.464768 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7c5886d9b4-c8rm2" podUID="f3c1e917-8b40-470c-88b8-5fa1a9c37665" containerName="neutron-api" containerID="cri-o://be4047119aa4bef93cd3090e7d99350f73d834475fa53d2a7b7504c6fb2866dd" gracePeriod=30 Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.465282 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-7c5886d9b4-c8rm2" podUID="f3c1e917-8b40-470c-88b8-5fa1a9c37665" containerName="neutron-httpd" containerID="cri-o://bd05f6b48e43050e377d15a2fcb042f47a538daec1f0710625751ca71e3f29ef" gracePeriod=30 Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.638520 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-7ddc564cd6-nc8vp"] Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.640069 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7ddc564cd6-nc8vp" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.642820 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.665963 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7ddc564cd6-nc8vp"] Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.703440 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-rkvs6" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.741165 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a657d0bd-fd03-44ca-b0a5-125e189f6a61-config-data\") pod \"barbican-api-7ddc564cd6-nc8vp\" (UID: \"a657d0bd-fd03-44ca-b0a5-125e189f6a61\") " pod="openstack/barbican-api-7ddc564cd6-nc8vp" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.741234 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a657d0bd-fd03-44ca-b0a5-125e189f6a61-combined-ca-bundle\") pod \"barbican-api-7ddc564cd6-nc8vp\" (UID: \"a657d0bd-fd03-44ca-b0a5-125e189f6a61\") " pod="openstack/barbican-api-7ddc564cd6-nc8vp" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.741263 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a657d0bd-fd03-44ca-b0a5-125e189f6a61-config-data-custom\") pod \"barbican-api-7ddc564cd6-nc8vp\" (UID: \"a657d0bd-fd03-44ca-b0a5-125e189f6a61\") " pod="openstack/barbican-api-7ddc564cd6-nc8vp" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.741323 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a657d0bd-fd03-44ca-b0a5-125e189f6a61-logs\") pod \"barbican-api-7ddc564cd6-nc8vp\" (UID: \"a657d0bd-fd03-44ca-b0a5-125e189f6a61\") " pod="openstack/barbican-api-7ddc564cd6-nc8vp" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.741350 4665 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7k7vs\" (UniqueName: \"kubernetes.io/projected/a657d0bd-fd03-44ca-b0a5-125e189f6a61-kube-api-access-7k7vs\") pod \"barbican-api-7ddc564cd6-nc8vp\" (UID: \"a657d0bd-fd03-44ca-b0a5-125e189f6a61\") " pod="openstack/barbican-api-7ddc564cd6-nc8vp" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.842912 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a657d0bd-fd03-44ca-b0a5-125e189f6a61-config-data\") pod \"barbican-api-7ddc564cd6-nc8vp\" (UID: \"a657d0bd-fd03-44ca-b0a5-125e189f6a61\") " pod="openstack/barbican-api-7ddc564cd6-nc8vp" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.842979 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a657d0bd-fd03-44ca-b0a5-125e189f6a61-combined-ca-bundle\") pod \"barbican-api-7ddc564cd6-nc8vp\" (UID: \"a657d0bd-fd03-44ca-b0a5-125e189f6a61\") " pod="openstack/barbican-api-7ddc564cd6-nc8vp" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.843005 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a657d0bd-fd03-44ca-b0a5-125e189f6a61-config-data-custom\") pod \"barbican-api-7ddc564cd6-nc8vp\" (UID: \"a657d0bd-fd03-44ca-b0a5-125e189f6a61\") " pod="openstack/barbican-api-7ddc564cd6-nc8vp" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.843050 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a657d0bd-fd03-44ca-b0a5-125e189f6a61-logs\") pod \"barbican-api-7ddc564cd6-nc8vp\" (UID: \"a657d0bd-fd03-44ca-b0a5-125e189f6a61\") " pod="openstack/barbican-api-7ddc564cd6-nc8vp" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.843072 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7k7vs\" (UniqueName: \"kubernetes.io/projected/a657d0bd-fd03-44ca-b0a5-125e189f6a61-kube-api-access-7k7vs\") pod \"barbican-api-7ddc564cd6-nc8vp\" (UID: \"a657d0bd-fd03-44ca-b0a5-125e189f6a61\") " pod="openstack/barbican-api-7ddc564cd6-nc8vp" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.844667 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a657d0bd-fd03-44ca-b0a5-125e189f6a61-logs\") pod \"barbican-api-7ddc564cd6-nc8vp\" (UID: \"a657d0bd-fd03-44ca-b0a5-125e189f6a61\") " pod="openstack/barbican-api-7ddc564cd6-nc8vp" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.849190 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a657d0bd-fd03-44ca-b0a5-125e189f6a61-config-data-custom\") pod \"barbican-api-7ddc564cd6-nc8vp\" (UID: \"a657d0bd-fd03-44ca-b0a5-125e189f6a61\") " pod="openstack/barbican-api-7ddc564cd6-nc8vp" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.849494 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a657d0bd-fd03-44ca-b0a5-125e189f6a61-config-data\") pod \"barbican-api-7ddc564cd6-nc8vp\" (UID: \"a657d0bd-fd03-44ca-b0a5-125e189f6a61\") " pod="openstack/barbican-api-7ddc564cd6-nc8vp" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.850014 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a657d0bd-fd03-44ca-b0a5-125e189f6a61-combined-ca-bundle\") pod \"barbican-api-7ddc564cd6-nc8vp\" (UID: \"a657d0bd-fd03-44ca-b0a5-125e189f6a61\") " pod="openstack/barbican-api-7ddc564cd6-nc8vp" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.888468 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7k7vs\" (UniqueName: \"kubernetes.io/projected/a657d0bd-fd03-44ca-b0a5-125e189f6a61-kube-api-access-7k7vs\") pod \"barbican-api-7ddc564cd6-nc8vp\" (UID: \"a657d0bd-fd03-44ca-b0a5-125e189f6a61\") " pod="openstack/barbican-api-7ddc564cd6-nc8vp" Dec 05 01:32:54 crc kubenswrapper[4665]: I1205 01:32:54.979667 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7ddc564cd6-nc8vp" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.080572 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.082101 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.089980 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.090230 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-9kmh2" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.090453 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.090567 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.110408 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.206004 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-vmbzv"] Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.248881 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\") " pod="openstack/cinder-scheduler-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.248981 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-config-data\") pod \"cinder-scheduler-0\" (UID: \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\") " pod="openstack/cinder-scheduler-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.249002 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58s5v\" (UniqueName: \"kubernetes.io/projected/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-kube-api-access-58s5v\") pod \"cinder-scheduler-0\" (UID: \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\") " pod="openstack/cinder-scheduler-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.249059 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-scripts\") pod \"cinder-scheduler-0\" (UID: \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\") " pod="openstack/cinder-scheduler-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.249081 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\") " pod="openstack/cinder-scheduler-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.249112 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\") " pod="openstack/cinder-scheduler-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.301143 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-9nqnv"] Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.302949 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.345891 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-9nqnv"] Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.369406 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\") " pod="openstack/cinder-scheduler-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.369477 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\") " pod="openstack/cinder-scheduler-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.369607 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-config-data\") pod \"cinder-scheduler-0\" (UID: \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\") " pod="openstack/cinder-scheduler-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.369629 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58s5v\" (UniqueName: \"kubernetes.io/projected/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-kube-api-access-58s5v\") pod \"cinder-scheduler-0\" (UID: \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\") " pod="openstack/cinder-scheduler-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.369715 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-scripts\") pod \"cinder-scheduler-0\" (UID: \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\") " pod="openstack/cinder-scheduler-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.369741 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\") " pod="openstack/cinder-scheduler-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.384397 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\") " pod="openstack/cinder-scheduler-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.394068 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-scripts\") pod \"cinder-scheduler-0\" (UID: \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\") " pod="openstack/cinder-scheduler-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.407968 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\") " pod="openstack/cinder-scheduler-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.419545 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58s5v\" (UniqueName: \"kubernetes.io/projected/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-kube-api-access-58s5v\") pod \"cinder-scheduler-0\" (UID: \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\") " pod="openstack/cinder-scheduler-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.428250 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-config-data\") pod \"cinder-scheduler-0\" (UID: \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\") " pod="openstack/cinder-scheduler-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.429030 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\") " pod="openstack/cinder-scheduler-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.471308 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.474274 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.475856 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-9nqnv\" (UID: \"7295fa1c-4087-4ae8-a38e-29edf3da8381\") " pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.476752 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47a3fd04-4de6-4517-b713-0dc980538bb7-config-data\") pod \"cinder-api-0\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") " pod="openstack/cinder-api-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.476876 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-config\") pod \"dnsmasq-dns-5c9776ccc5-9nqnv\" (UID: \"7295fa1c-4087-4ae8-a38e-29edf3da8381\") " pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.476985 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/47a3fd04-4de6-4517-b713-0dc980538bb7-etc-machine-id\") pod \"cinder-api-0\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") " pod="openstack/cinder-api-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.477154 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47a3fd04-4de6-4517-b713-0dc980538bb7-scripts\") pod \"cinder-api-0\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") " pod="openstack/cinder-api-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.477254 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-9nqnv\" (UID: \"7295fa1c-4087-4ae8-a38e-29edf3da8381\") " pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.477390 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w9vn2\" (UniqueName: \"kubernetes.io/projected/47a3fd04-4de6-4517-b713-0dc980538bb7-kube-api-access-w9vn2\") pod \"cinder-api-0\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") " pod="openstack/cinder-api-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.477653 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47a3fd04-4de6-4517-b713-0dc980538bb7-logs\") pod \"cinder-api-0\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") " pod="openstack/cinder-api-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.477679 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-9nqnv\" (UID: \"7295fa1c-4087-4ae8-a38e-29edf3da8381\") " pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" Dec 05 01:32:55 crc kubenswrapper[4665]: 
I1205 01:32:55.477711 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-9nqnv\" (UID: \"7295fa1c-4087-4ae8-a38e-29edf3da8381\") " pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.477742 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/47a3fd04-4de6-4517-b713-0dc980538bb7-config-data-custom\") pod \"cinder-api-0\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") " pod="openstack/cinder-api-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.477823 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljz2l\" (UniqueName: \"kubernetes.io/projected/7295fa1c-4087-4ae8-a38e-29edf3da8381-kube-api-access-ljz2l\") pod \"dnsmasq-dns-5c9776ccc5-9nqnv\" (UID: \"7295fa1c-4087-4ae8-a38e-29edf3da8381\") " pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.477852 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47a3fd04-4de6-4517-b713-0dc980538bb7-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") " pod="openstack/cinder-api-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.482978 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.489577 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.578881 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47a3fd04-4de6-4517-b713-0dc980538bb7-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") " pod="openstack/cinder-api-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.578923 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-9nqnv\" (UID: \"7295fa1c-4087-4ae8-a38e-29edf3da8381\") " pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.578940 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47a3fd04-4de6-4517-b713-0dc980538bb7-config-data\") pod \"cinder-api-0\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") " pod="openstack/cinder-api-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.578973 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-config\") pod \"dnsmasq-dns-5c9776ccc5-9nqnv\" (UID: \"7295fa1c-4087-4ae8-a38e-29edf3da8381\") " pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.578993 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/47a3fd04-4de6-4517-b713-0dc980538bb7-etc-machine-id\") pod \"cinder-api-0\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") " pod="openstack/cinder-api-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.579044 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47a3fd04-4de6-4517-b713-0dc980538bb7-scripts\") pod \"cinder-api-0\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") " pod="openstack/cinder-api-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.579061 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-9nqnv\" (UID: \"7295fa1c-4087-4ae8-a38e-29edf3da8381\") " pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.579075 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w9vn2\" (UniqueName: \"kubernetes.io/projected/47a3fd04-4de6-4517-b713-0dc980538bb7-kube-api-access-w9vn2\") pod \"cinder-api-0\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") " pod="openstack/cinder-api-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.579090 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47a3fd04-4de6-4517-b713-0dc980538bb7-logs\") pod \"cinder-api-0\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") " pod="openstack/cinder-api-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.579105 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-9nqnv\" (UID: \"7295fa1c-4087-4ae8-a38e-29edf3da8381\") " pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.579127 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-9nqnv\" (UID: \"7295fa1c-4087-4ae8-a38e-29edf3da8381\") " pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.579151 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/47a3fd04-4de6-4517-b713-0dc980538bb7-config-data-custom\") pod \"cinder-api-0\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") " pod="openstack/cinder-api-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.579199 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljz2l\" (UniqueName: \"kubernetes.io/projected/7295fa1c-4087-4ae8-a38e-29edf3da8381-kube-api-access-ljz2l\") pod \"dnsmasq-dns-5c9776ccc5-9nqnv\" (UID: \"7295fa1c-4087-4ae8-a38e-29edf3da8381\") " pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.580338 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-ovsdbserver-nb\") pod \"dnsmasq-dns-5c9776ccc5-9nqnv\" (UID: \"7295fa1c-4087-4ae8-a38e-29edf3da8381\") " pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" 
Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.580954 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-dns-svc\") pod \"dnsmasq-dns-5c9776ccc5-9nqnv\" (UID: \"7295fa1c-4087-4ae8-a38e-29edf3da8381\") " pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.581125 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/47a3fd04-4de6-4517-b713-0dc980538bb7-etc-machine-id\") pod \"cinder-api-0\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") " pod="openstack/cinder-api-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.582101 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-dns-swift-storage-0\") pod \"dnsmasq-dns-5c9776ccc5-9nqnv\" (UID: \"7295fa1c-4087-4ae8-a38e-29edf3da8381\") " pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.582671 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47a3fd04-4de6-4517-b713-0dc980538bb7-logs\") pod \"cinder-api-0\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") " pod="openstack/cinder-api-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.582927 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-ovsdbserver-sb\") pod \"dnsmasq-dns-5c9776ccc5-9nqnv\" (UID: \"7295fa1c-4087-4ae8-a38e-29edf3da8381\") " pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.583050 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-config\") pod \"dnsmasq-dns-5c9776ccc5-9nqnv\" (UID: \"7295fa1c-4087-4ae8-a38e-29edf3da8381\") " pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.585618 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47a3fd04-4de6-4517-b713-0dc980538bb7-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") " pod="openstack/cinder-api-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.598197 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47a3fd04-4de6-4517-b713-0dc980538bb7-scripts\") pod \"cinder-api-0\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") " pod="openstack/cinder-api-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.599698 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w9vn2\" (UniqueName: \"kubernetes.io/projected/47a3fd04-4de6-4517-b713-0dc980538bb7-kube-api-access-w9vn2\") pod \"cinder-api-0\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") " pod="openstack/cinder-api-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.599843 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47a3fd04-4de6-4517-b713-0dc980538bb7-config-data\") pod \"cinder-api-0\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") " 
pod="openstack/cinder-api-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.600450 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljz2l\" (UniqueName: \"kubernetes.io/projected/7295fa1c-4087-4ae8-a38e-29edf3da8381-kube-api-access-ljz2l\") pod \"dnsmasq-dns-5c9776ccc5-9nqnv\" (UID: \"7295fa1c-4087-4ae8-a38e-29edf3da8381\") " pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.607836 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/47a3fd04-4de6-4517-b713-0dc980538bb7-config-data-custom\") pod \"cinder-api-0\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") " pod="openstack/cinder-api-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.624007 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.713426 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.729659 4665 generic.go:334] "Generic (PLEG): container finished" podID="f3c1e917-8b40-470c-88b8-5fa1a9c37665" containerID="bd05f6b48e43050e377d15a2fcb042f47a538daec1f0710625751ca71e3f29ef" exitCode=0 Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.729717 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c5886d9b4-c8rm2" event={"ID":"f3c1e917-8b40-470c-88b8-5fa1a9c37665","Type":"ContainerDied","Data":"bd05f6b48e43050e377d15a2fcb042f47a538daec1f0710625751ca71e3f29ef"} Dec 05 01:32:55 crc kubenswrapper[4665]: I1205 01:32:55.823726 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 01:32:57 crc kubenswrapper[4665]: I1205 01:32:57.187731 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-vmbzv"] Dec 05 01:32:57 crc kubenswrapper[4665]: I1205 01:32:57.265777 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-7ddc564cd6-nc8vp"] Dec 05 01:32:57 crc kubenswrapper[4665]: I1205 01:32:57.479425 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-9nqnv"] Dec 05 01:32:57 crc kubenswrapper[4665]: I1205 01:32:57.482411 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-5bf7b484d7-782wq"] Dec 05 01:32:57 crc kubenswrapper[4665]: I1205 01:32:57.680580 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 01:32:57 crc kubenswrapper[4665]: I1205 01:32:57.716685 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 01:32:57 crc kubenswrapper[4665]: I1205 01:32:57.731936 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-d6596b4bb-7zqjr"] Dec 05 01:32:57 crc kubenswrapper[4665]: I1205 01:32:57.759027 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7ddc564cd6-nc8vp" event={"ID":"a657d0bd-fd03-44ca-b0a5-125e189f6a61","Type":"ContainerStarted","Data":"8de849a3df57fdb1079ffdbc4cbb72bcfb711ebc213d0812d13496defd3d0c68"} Dec 05 01:32:57 crc kubenswrapper[4665]: I1205 01:32:57.759063 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7ddc564cd6-nc8vp" event={"ID":"a657d0bd-fd03-44ca-b0a5-125e189f6a61","Type":"ContainerStarted","Data":"7a0d216f12f565c4723f207035b415a9a97d96586c69b15e4b59fbc963575a20"} Dec 05 01:32:57 crc kubenswrapper[4665]: I1205 01:32:57.759862 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-vmbzv" event={"ID":"f722b6aa-393f-41b5-a8f2-d69db634ea70","Type":"ContainerStarted","Data":"dad2e277faf30d3233713b89c418014ea79f70d21dbe4782c56fbc1da7f67f16"} Dec 05 01:32:57 crc kubenswrapper[4665]: I1205 01:32:57.764043 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b995216-a8c4-418c-9a82-eff79ca5360c","Type":"ContainerStarted","Data":"50163aad77345be6bf450a88e33395654a9c4ec4bb30ce71e409fdf2e02e09d2"} Dec 05 01:32:57 crc kubenswrapper[4665]: I1205 01:32:57.764258 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6b995216-a8c4-418c-9a82-eff79ca5360c" containerName="ceilometer-central-agent" containerID="cri-o://4262c2a6d6acc16e3edda86aafa5487037d44b0b9374b4972573af34a7f5279b" gracePeriod=30 Dec 05 01:32:57 crc kubenswrapper[4665]: I1205 01:32:57.764429 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 01:32:57 crc kubenswrapper[4665]: I1205 01:32:57.764473 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6b995216-a8c4-418c-9a82-eff79ca5360c" containerName="proxy-httpd" containerID="cri-o://50163aad77345be6bf450a88e33395654a9c4ec4bb30ce71e409fdf2e02e09d2" gracePeriod=30 Dec 05 01:32:57 crc kubenswrapper[4665]: I1205 01:32:57.764554 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6b995216-a8c4-418c-9a82-eff79ca5360c" containerName="sg-core" 
containerID="cri-o://4beef3c7d2d461d99f18a2ec654e3e26b1e5f78b1a90255d2bd4ba7feee12382" gracePeriod=30 Dec 05 01:32:57 crc kubenswrapper[4665]: I1205 01:32:57.764597 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="6b995216-a8c4-418c-9a82-eff79ca5360c" containerName="ceilometer-notification-agent" containerID="cri-o://eb59fe1d1b872f3c8fc8b79d8d750264f43eac4650a8ce7a479a026dcaf2977e" gracePeriod=30 Dec 05 01:32:57 crc kubenswrapper[4665]: I1205 01:32:57.768700 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5bf7b484d7-782wq" event={"ID":"9dd95469-e581-46aa-bbb2-c69214aa26c7","Type":"ContainerStarted","Data":"b27292d8ebb52a2655f523bd1d3483fe7fd5803ef58a303c57e68bba11a0a33b"} Dec 05 01:32:57 crc kubenswrapper[4665]: I1205 01:32:57.769945 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32","Type":"ContainerStarted","Data":"f1d27a0cedff9c02a0e6d15f17c66b66745a290939751e6bc7a8fb933852a527"} Dec 05 01:32:57 crc kubenswrapper[4665]: I1205 01:32:57.777211 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"47a3fd04-4de6-4517-b713-0dc980538bb7","Type":"ContainerStarted","Data":"3479f2b532b2b588cc85bb8a3884f27a2154e92e0b3f2bcd3f486844bc791fc2"} Dec 05 01:32:57 crc kubenswrapper[4665]: I1205 01:32:57.780905 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" event={"ID":"7295fa1c-4087-4ae8-a38e-29edf3da8381","Type":"ContainerStarted","Data":"658890844f8674ec454ea27ba3ffe079b12ccea50fd4645d53c8a477e644bbf8"} Dec 05 01:32:57 crc kubenswrapper[4665]: I1205 01:32:57.802333 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=4.421070412 podStartE2EDuration="1m19.802312754s" podCreationTimestamp="2025-12-05 01:31:38 +0000 UTC" firstStartedPulling="2025-12-05 01:31:40.955398473 +0000 UTC m=+1276.294790772" lastFinishedPulling="2025-12-05 01:32:56.336640815 +0000 UTC m=+1351.676033114" observedRunningTime="2025-12-05 01:32:57.784875506 +0000 UTC m=+1353.124267805" watchObservedRunningTime="2025-12-05 01:32:57.802312754 +0000 UTC m=+1353.141705043" Dec 05 01:32:58 crc kubenswrapper[4665]: I1205 01:32:58.660957 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 05 01:32:58 crc kubenswrapper[4665]: I1205 01:32:58.801842 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7ddc564cd6-nc8vp" event={"ID":"a657d0bd-fd03-44ca-b0a5-125e189f6a61","Type":"ContainerStarted","Data":"3de0c157e5fece4288ac7e3f58f62ce2f1c195628949abbdb583e6f2a028880a"} Dec 05 01:32:58 crc kubenswrapper[4665]: I1205 01:32:58.802344 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7ddc564cd6-nc8vp" Dec 05 01:32:58 crc kubenswrapper[4665]: I1205 01:32:58.802387 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-7ddc564cd6-nc8vp" Dec 05 01:32:58 crc kubenswrapper[4665]: I1205 01:32:58.810155 4665 generic.go:334] "Generic (PLEG): container finished" podID="f722b6aa-393f-41b5-a8f2-d69db634ea70" containerID="4108aa12a5e9e8d11d2bd7b06db7c1c58b7b1454bb9114e34ab279df095553bb" exitCode=0 Dec 05 01:32:58 crc kubenswrapper[4665]: I1205 01:32:58.810233 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-vmbzv" 
event={"ID":"f722b6aa-393f-41b5-a8f2-d69db634ea70","Type":"ContainerDied","Data":"4108aa12a5e9e8d11d2bd7b06db7c1c58b7b1454bb9114e34ab279df095553bb"} Dec 05 01:32:58 crc kubenswrapper[4665]: I1205 01:32:58.813415 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-d6596b4bb-7zqjr" event={"ID":"6cf44f27-5007-4db9-8784-715bdef486a0","Type":"ContainerStarted","Data":"da935e7257f92e749967e0a19b05910fb67b1350e3350cf6e2cb92d6a9a42d19"} Dec 05 01:32:58 crc kubenswrapper[4665]: I1205 01:32:58.831222 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-7ddc564cd6-nc8vp" podStartSLOduration=4.8312076269999995 podStartE2EDuration="4.831207627s" podCreationTimestamp="2025-12-05 01:32:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:32:58.82463668 +0000 UTC m=+1354.164028979" watchObservedRunningTime="2025-12-05 01:32:58.831207627 +0000 UTC m=+1354.170599926" Dec 05 01:32:58 crc kubenswrapper[4665]: I1205 01:32:58.847257 4665 generic.go:334] "Generic (PLEG): container finished" podID="6b995216-a8c4-418c-9a82-eff79ca5360c" containerID="50163aad77345be6bf450a88e33395654a9c4ec4bb30ce71e409fdf2e02e09d2" exitCode=0 Dec 05 01:32:58 crc kubenswrapper[4665]: I1205 01:32:58.847285 4665 generic.go:334] "Generic (PLEG): container finished" podID="6b995216-a8c4-418c-9a82-eff79ca5360c" containerID="4beef3c7d2d461d99f18a2ec654e3e26b1e5f78b1a90255d2bd4ba7feee12382" exitCode=2 Dec 05 01:32:58 crc kubenswrapper[4665]: I1205 01:32:58.847306 4665 generic.go:334] "Generic (PLEG): container finished" podID="6b995216-a8c4-418c-9a82-eff79ca5360c" containerID="eb59fe1d1b872f3c8fc8b79d8d750264f43eac4650a8ce7a479a026dcaf2977e" exitCode=0 Dec 05 01:32:58 crc kubenswrapper[4665]: I1205 01:32:58.847312 4665 generic.go:334] "Generic (PLEG): container finished" podID="6b995216-a8c4-418c-9a82-eff79ca5360c" containerID="4262c2a6d6acc16e3edda86aafa5487037d44b0b9374b4972573af34a7f5279b" exitCode=0 Dec 05 01:32:58 crc kubenswrapper[4665]: I1205 01:32:58.847350 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b995216-a8c4-418c-9a82-eff79ca5360c","Type":"ContainerDied","Data":"50163aad77345be6bf450a88e33395654a9c4ec4bb30ce71e409fdf2e02e09d2"} Dec 05 01:32:58 crc kubenswrapper[4665]: I1205 01:32:58.847376 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b995216-a8c4-418c-9a82-eff79ca5360c","Type":"ContainerDied","Data":"4beef3c7d2d461d99f18a2ec654e3e26b1e5f78b1a90255d2bd4ba7feee12382"} Dec 05 01:32:58 crc kubenswrapper[4665]: I1205 01:32:58.847387 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b995216-a8c4-418c-9a82-eff79ca5360c","Type":"ContainerDied","Data":"eb59fe1d1b872f3c8fc8b79d8d750264f43eac4650a8ce7a479a026dcaf2977e"} Dec 05 01:32:58 crc kubenswrapper[4665]: I1205 01:32:58.847395 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b995216-a8c4-418c-9a82-eff79ca5360c","Type":"ContainerDied","Data":"4262c2a6d6acc16e3edda86aafa5487037d44b0b9374b4972573af34a7f5279b"} Dec 05 01:32:58 crc kubenswrapper[4665]: I1205 01:32:58.860883 4665 generic.go:334] "Generic (PLEG): container finished" podID="7295fa1c-4087-4ae8-a38e-29edf3da8381" containerID="02a7f65e33f49eae6044b2775ea7a60c718e98a854bbd72c9439c62762e196e3" exitCode=0 Dec 05 01:32:58 crc 
kubenswrapper[4665]: I1205 01:32:58.860925 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" event={"ID":"7295fa1c-4087-4ae8-a38e-29edf3da8381","Type":"ContainerDied","Data":"02a7f65e33f49eae6044b2775ea7a60c718e98a854bbd72c9439c62762e196e3"} Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.548149 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-vmbzv" Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.697973 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rhs4l\" (UniqueName: \"kubernetes.io/projected/f722b6aa-393f-41b5-a8f2-d69db634ea70-kube-api-access-rhs4l\") pod \"f722b6aa-393f-41b5-a8f2-d69db634ea70\" (UID: \"f722b6aa-393f-41b5-a8f2-d69db634ea70\") " Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.698088 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-config\") pod \"f722b6aa-393f-41b5-a8f2-d69db634ea70\" (UID: \"f722b6aa-393f-41b5-a8f2-d69db634ea70\") " Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.698144 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-dns-swift-storage-0\") pod \"f722b6aa-393f-41b5-a8f2-d69db634ea70\" (UID: \"f722b6aa-393f-41b5-a8f2-d69db634ea70\") " Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.698231 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-dns-svc\") pod \"f722b6aa-393f-41b5-a8f2-d69db634ea70\" (UID: \"f722b6aa-393f-41b5-a8f2-d69db634ea70\") " Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.698253 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-ovsdbserver-nb\") pod \"f722b6aa-393f-41b5-a8f2-d69db634ea70\" (UID: \"f722b6aa-393f-41b5-a8f2-d69db634ea70\") " Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.698274 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-ovsdbserver-sb\") pod \"f722b6aa-393f-41b5-a8f2-d69db634ea70\" (UID: \"f722b6aa-393f-41b5-a8f2-d69db634ea70\") " Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.735578 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f722b6aa-393f-41b5-a8f2-d69db634ea70-kube-api-access-rhs4l" (OuterVolumeSpecName: "kube-api-access-rhs4l") pod "f722b6aa-393f-41b5-a8f2-d69db634ea70" (UID: "f722b6aa-393f-41b5-a8f2-d69db634ea70"). InnerVolumeSpecName "kube-api-access-rhs4l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.773041 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "f722b6aa-393f-41b5-a8f2-d69db634ea70" (UID: "f722b6aa-393f-41b5-a8f2-d69db634ea70"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.793226 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "f722b6aa-393f-41b5-a8f2-d69db634ea70" (UID: "f722b6aa-393f-41b5-a8f2-d69db634ea70"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.799938 4665 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.799961 4665 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.799970 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhs4l\" (UniqueName: \"kubernetes.io/projected/f722b6aa-393f-41b5-a8f2-d69db634ea70-kube-api-access-rhs4l\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.801961 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "f722b6aa-393f-41b5-a8f2-d69db634ea70" (UID: "f722b6aa-393f-41b5-a8f2-d69db634ea70"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.803639 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-config" (OuterVolumeSpecName: "config") pod "f722b6aa-393f-41b5-a8f2-d69db634ea70" (UID: "f722b6aa-393f-41b5-a8f2-d69db634ea70"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.824962 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "f722b6aa-393f-41b5-a8f2-d69db634ea70" (UID: "f722b6aa-393f-41b5-a8f2-d69db634ea70"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.885907 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"6b995216-a8c4-418c-9a82-eff79ca5360c","Type":"ContainerDied","Data":"60a08a924e26a8d8ab25f853125cda0bd7de6cfcef34172f6900b38eb44285f5"} Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.885945 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="60a08a924e26a8d8ab25f853125cda0bd7de6cfcef34172f6900b38eb44285f5" Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.886362 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.887484 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"47a3fd04-4de6-4517-b713-0dc980538bb7","Type":"ContainerStarted","Data":"049102bbb54ab2947365cf2fbc619e2ad284ccdcc0b950dc18f3f12f921c9733"} Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.890006 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" event={"ID":"7295fa1c-4087-4ae8-a38e-29edf3da8381","Type":"ContainerStarted","Data":"02d04c8390c3a729b392254dd9073f799d10e34bd15cff5c73211b5c4e847bc1"} Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.891929 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.896017 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-85ff748b95-vmbzv" Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.896387 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-85ff748b95-vmbzv" event={"ID":"f722b6aa-393f-41b5-a8f2-d69db634ea70","Type":"ContainerDied","Data":"dad2e277faf30d3233713b89c418014ea79f70d21dbe4782c56fbc1da7f67f16"} Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.896424 4665 scope.go:117] "RemoveContainer" containerID="4108aa12a5e9e8d11d2bd7b06db7c1c58b7b1454bb9114e34ab279df095553bb" Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.901575 4665 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.901597 4665 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.901606 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f722b6aa-393f-41b5-a8f2-d69db634ea70-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:32:59 crc kubenswrapper[4665]: I1205 01:32:59.962776 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" podStartSLOduration=4.962755795 podStartE2EDuration="4.962755795s" podCreationTimestamp="2025-12-05 01:32:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:32:59.953594895 +0000 UTC m=+1355.292987194" watchObservedRunningTime="2025-12-05 01:32:59.962755795 +0000 UTC m=+1355.302148094" Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.002434 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b995216-a8c4-418c-9a82-eff79ca5360c-combined-ca-bundle\") pod \"6b995216-a8c4-418c-9a82-eff79ca5360c\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.002470 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b995216-a8c4-418c-9a82-eff79ca5360c-config-data\") pod \"6b995216-a8c4-418c-9a82-eff79ca5360c\" (UID: 
\"6b995216-a8c4-418c-9a82-eff79ca5360c\") " Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.002536 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tcgq\" (UniqueName: \"kubernetes.io/projected/6b995216-a8c4-418c-9a82-eff79ca5360c-kube-api-access-8tcgq\") pod \"6b995216-a8c4-418c-9a82-eff79ca5360c\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.002571 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b995216-a8c4-418c-9a82-eff79ca5360c-run-httpd\") pod \"6b995216-a8c4-418c-9a82-eff79ca5360c\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.002596 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b995216-a8c4-418c-9a82-eff79ca5360c-scripts\") pod \"6b995216-a8c4-418c-9a82-eff79ca5360c\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.002632 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b995216-a8c4-418c-9a82-eff79ca5360c-sg-core-conf-yaml\") pod \"6b995216-a8c4-418c-9a82-eff79ca5360c\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.002681 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b995216-a8c4-418c-9a82-eff79ca5360c-log-httpd\") pod \"6b995216-a8c4-418c-9a82-eff79ca5360c\" (UID: \"6b995216-a8c4-418c-9a82-eff79ca5360c\") " Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.007186 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b995216-a8c4-418c-9a82-eff79ca5360c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "6b995216-a8c4-418c-9a82-eff79ca5360c" (UID: "6b995216-a8c4-418c-9a82-eff79ca5360c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.011047 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6b995216-a8c4-418c-9a82-eff79ca5360c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "6b995216-a8c4-418c-9a82-eff79ca5360c" (UID: "6b995216-a8c4-418c-9a82-eff79ca5360c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.039484 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b995216-a8c4-418c-9a82-eff79ca5360c-scripts" (OuterVolumeSpecName: "scripts") pod "6b995216-a8c4-418c-9a82-eff79ca5360c" (UID: "6b995216-a8c4-418c-9a82-eff79ca5360c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.039605 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6b995216-a8c4-418c-9a82-eff79ca5360c-kube-api-access-8tcgq" (OuterVolumeSpecName: "kube-api-access-8tcgq") pod "6b995216-a8c4-418c-9a82-eff79ca5360c" (UID: "6b995216-a8c4-418c-9a82-eff79ca5360c"). InnerVolumeSpecName "kube-api-access-8tcgq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.067431 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-vmbzv"] Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.105465 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tcgq\" (UniqueName: \"kubernetes.io/projected/6b995216-a8c4-418c-9a82-eff79ca5360c-kube-api-access-8tcgq\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.105496 4665 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b995216-a8c4-418c-9a82-eff79ca5360c-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.105506 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/6b995216-a8c4-418c-9a82-eff79ca5360c-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.105516 4665 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/6b995216-a8c4-418c-9a82-eff79ca5360c-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.108425 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b995216-a8c4-418c-9a82-eff79ca5360c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "6b995216-a8c4-418c-9a82-eff79ca5360c" (UID: "6b995216-a8c4-418c-9a82-eff79ca5360c"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.138735 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-85ff748b95-vmbzv"] Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.180972 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b995216-a8c4-418c-9a82-eff79ca5360c-config-data" (OuterVolumeSpecName: "config-data") pod "6b995216-a8c4-418c-9a82-eff79ca5360c" (UID: "6b995216-a8c4-418c-9a82-eff79ca5360c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.181707 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6b995216-a8c4-418c-9a82-eff79ca5360c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6b995216-a8c4-418c-9a82-eff79ca5360c" (UID: "6b995216-a8c4-418c-9a82-eff79ca5360c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.208120 4665 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/6b995216-a8c4-418c-9a82-eff79ca5360c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.208383 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b995216-a8c4-418c-9a82-eff79ca5360c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.208396 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6b995216-a8c4-418c-9a82-eff79ca5360c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.907961 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f722b6aa-393f-41b5-a8f2-d69db634ea70" path="/var/lib/kubelet/pods/f722b6aa-393f-41b5-a8f2-d69db634ea70/volumes" Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.918378 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32","Type":"ContainerStarted","Data":"cba11c36bf9292c821610b56b3254680622f9777228bc073eab1aca46cbbf022"} Dec 05 01:33:00 crc kubenswrapper[4665]: I1205 01:33:00.918558 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.036402 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.053329 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.074897 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:33:01 crc kubenswrapper[4665]: E1205 01:33:01.075316 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f722b6aa-393f-41b5-a8f2-d69db634ea70" containerName="init" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.075328 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="f722b6aa-393f-41b5-a8f2-d69db634ea70" containerName="init" Dec 05 01:33:01 crc kubenswrapper[4665]: E1205 01:33:01.075342 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b995216-a8c4-418c-9a82-eff79ca5360c" containerName="ceilometer-central-agent" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.075348 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b995216-a8c4-418c-9a82-eff79ca5360c" containerName="ceilometer-central-agent" Dec 05 01:33:01 crc kubenswrapper[4665]: E1205 01:33:01.075378 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b995216-a8c4-418c-9a82-eff79ca5360c" containerName="ceilometer-notification-agent" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.075384 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b995216-a8c4-418c-9a82-eff79ca5360c" containerName="ceilometer-notification-agent" Dec 05 01:33:01 crc kubenswrapper[4665]: E1205 01:33:01.075403 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b995216-a8c4-418c-9a82-eff79ca5360c" containerName="sg-core" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.075409 4665 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="6b995216-a8c4-418c-9a82-eff79ca5360c" containerName="sg-core" Dec 05 01:33:01 crc kubenswrapper[4665]: E1205 01:33:01.075418 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6b995216-a8c4-418c-9a82-eff79ca5360c" containerName="proxy-httpd" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.075424 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="6b995216-a8c4-418c-9a82-eff79ca5360c" containerName="proxy-httpd" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.075618 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b995216-a8c4-418c-9a82-eff79ca5360c" containerName="ceilometer-central-agent" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.075632 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="f722b6aa-393f-41b5-a8f2-d69db634ea70" containerName="init" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.075639 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b995216-a8c4-418c-9a82-eff79ca5360c" containerName="ceilometer-notification-agent" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.075655 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b995216-a8c4-418c-9a82-eff79ca5360c" containerName="sg-core" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.075671 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="6b995216-a8c4-418c-9a82-eff79ca5360c" containerName="proxy-httpd" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.077535 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.090827 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.091072 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.096448 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.217713 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d94611a-4825-4614-a16f-a206f227201c-config-data\") pod \"ceilometer-0\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " pod="openstack/ceilometer-0" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.217760 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-44mn2\" (UniqueName: \"kubernetes.io/projected/7d94611a-4825-4614-a16f-a206f227201c-kube-api-access-44mn2\") pod \"ceilometer-0\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " pod="openstack/ceilometer-0" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.217789 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d94611a-4825-4614-a16f-a206f227201c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " pod="openstack/ceilometer-0" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.217814 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d94611a-4825-4614-a16f-a206f227201c-scripts\") pod \"ceilometer-0\" (UID: 
\"7d94611a-4825-4614-a16f-a206f227201c\") " pod="openstack/ceilometer-0" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.217874 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7d94611a-4825-4614-a16f-a206f227201c-log-httpd\") pod \"ceilometer-0\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " pod="openstack/ceilometer-0" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.217894 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7d94611a-4825-4614-a16f-a206f227201c-run-httpd\") pod \"ceilometer-0\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " pod="openstack/ceilometer-0" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.217922 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7d94611a-4825-4614-a16f-a206f227201c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " pod="openstack/ceilometer-0" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.320018 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7d94611a-4825-4614-a16f-a206f227201c-log-httpd\") pod \"ceilometer-0\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " pod="openstack/ceilometer-0" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.320332 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7d94611a-4825-4614-a16f-a206f227201c-run-httpd\") pod \"ceilometer-0\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " pod="openstack/ceilometer-0" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.320366 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7d94611a-4825-4614-a16f-a206f227201c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " pod="openstack/ceilometer-0" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.320442 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d94611a-4825-4614-a16f-a206f227201c-config-data\") pod \"ceilometer-0\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " pod="openstack/ceilometer-0" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.320467 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-44mn2\" (UniqueName: \"kubernetes.io/projected/7d94611a-4825-4614-a16f-a206f227201c-kube-api-access-44mn2\") pod \"ceilometer-0\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " pod="openstack/ceilometer-0" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.320494 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d94611a-4825-4614-a16f-a206f227201c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " pod="openstack/ceilometer-0" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.320514 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/7d94611a-4825-4614-a16f-a206f227201c-scripts\") pod \"ceilometer-0\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " pod="openstack/ceilometer-0" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.320575 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7d94611a-4825-4614-a16f-a206f227201c-log-httpd\") pod \"ceilometer-0\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " pod="openstack/ceilometer-0" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.321368 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7d94611a-4825-4614-a16f-a206f227201c-run-httpd\") pod \"ceilometer-0\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " pod="openstack/ceilometer-0" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.327875 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7d94611a-4825-4614-a16f-a206f227201c-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " pod="openstack/ceilometer-0" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.328267 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d94611a-4825-4614-a16f-a206f227201c-config-data\") pod \"ceilometer-0\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " pod="openstack/ceilometer-0" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.331183 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d94611a-4825-4614-a16f-a206f227201c-scripts\") pod \"ceilometer-0\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " pod="openstack/ceilometer-0" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.332446 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d94611a-4825-4614-a16f-a206f227201c-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " pod="openstack/ceilometer-0" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.348468 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-44mn2\" (UniqueName: \"kubernetes.io/projected/7d94611a-4825-4614-a16f-a206f227201c-kube-api-access-44mn2\") pod \"ceilometer-0\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " pod="openstack/ceilometer-0" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.472747 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.517766 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-6f55bbcd96-6gf9l"] Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.519462 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.524488 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.524794 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.533695 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6f55bbcd96-6gf9l"] Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.625004 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/db4c2bcc-14d3-4129-89d9-e25d6c01ef02-config-data-custom\") pod \"barbican-api-6f55bbcd96-6gf9l\" (UID: \"db4c2bcc-14d3-4129-89d9-e25d6c01ef02\") " pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.625095 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db4c2bcc-14d3-4129-89d9-e25d6c01ef02-logs\") pod \"barbican-api-6f55bbcd96-6gf9l\" (UID: \"db4c2bcc-14d3-4129-89d9-e25d6c01ef02\") " pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.625139 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/db4c2bcc-14d3-4129-89d9-e25d6c01ef02-internal-tls-certs\") pod \"barbican-api-6f55bbcd96-6gf9l\" (UID: \"db4c2bcc-14d3-4129-89d9-e25d6c01ef02\") " pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.625159 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/db4c2bcc-14d3-4129-89d9-e25d6c01ef02-public-tls-certs\") pod \"barbican-api-6f55bbcd96-6gf9l\" (UID: \"db4c2bcc-14d3-4129-89d9-e25d6c01ef02\") " pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.625191 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db4c2bcc-14d3-4129-89d9-e25d6c01ef02-config-data\") pod \"barbican-api-6f55bbcd96-6gf9l\" (UID: \"db4c2bcc-14d3-4129-89d9-e25d6c01ef02\") " pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.625227 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db4c2bcc-14d3-4129-89d9-e25d6c01ef02-combined-ca-bundle\") pod \"barbican-api-6f55bbcd96-6gf9l\" (UID: \"db4c2bcc-14d3-4129-89d9-e25d6c01ef02\") " pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.625270 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r54hv\" (UniqueName: \"kubernetes.io/projected/db4c2bcc-14d3-4129-89d9-e25d6c01ef02-kube-api-access-r54hv\") pod \"barbican-api-6f55bbcd96-6gf9l\" (UID: \"db4c2bcc-14d3-4129-89d9-e25d6c01ef02\") " pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.727780 4665 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/db4c2bcc-14d3-4129-89d9-e25d6c01ef02-internal-tls-certs\") pod \"barbican-api-6f55bbcd96-6gf9l\" (UID: \"db4c2bcc-14d3-4129-89d9-e25d6c01ef02\") " pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.727835 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/db4c2bcc-14d3-4129-89d9-e25d6c01ef02-public-tls-certs\") pod \"barbican-api-6f55bbcd96-6gf9l\" (UID: \"db4c2bcc-14d3-4129-89d9-e25d6c01ef02\") " pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.727869 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db4c2bcc-14d3-4129-89d9-e25d6c01ef02-config-data\") pod \"barbican-api-6f55bbcd96-6gf9l\" (UID: \"db4c2bcc-14d3-4129-89d9-e25d6c01ef02\") " pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.727910 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db4c2bcc-14d3-4129-89d9-e25d6c01ef02-combined-ca-bundle\") pod \"barbican-api-6f55bbcd96-6gf9l\" (UID: \"db4c2bcc-14d3-4129-89d9-e25d6c01ef02\") " pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.727956 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r54hv\" (UniqueName: \"kubernetes.io/projected/db4c2bcc-14d3-4129-89d9-e25d6c01ef02-kube-api-access-r54hv\") pod \"barbican-api-6f55bbcd96-6gf9l\" (UID: \"db4c2bcc-14d3-4129-89d9-e25d6c01ef02\") " pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.728082 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/db4c2bcc-14d3-4129-89d9-e25d6c01ef02-config-data-custom\") pod \"barbican-api-6f55bbcd96-6gf9l\" (UID: \"db4c2bcc-14d3-4129-89d9-e25d6c01ef02\") " pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.728116 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db4c2bcc-14d3-4129-89d9-e25d6c01ef02-logs\") pod \"barbican-api-6f55bbcd96-6gf9l\" (UID: \"db4c2bcc-14d3-4129-89d9-e25d6c01ef02\") " pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.728622 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/db4c2bcc-14d3-4129-89d9-e25d6c01ef02-logs\") pod \"barbican-api-6f55bbcd96-6gf9l\" (UID: \"db4c2bcc-14d3-4129-89d9-e25d6c01ef02\") " pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.733664 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/db4c2bcc-14d3-4129-89d9-e25d6c01ef02-config-data-custom\") pod \"barbican-api-6f55bbcd96-6gf9l\" (UID: \"db4c2bcc-14d3-4129-89d9-e25d6c01ef02\") " pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.736512 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" 
(UniqueName: \"kubernetes.io/secret/db4c2bcc-14d3-4129-89d9-e25d6c01ef02-internal-tls-certs\") pod \"barbican-api-6f55bbcd96-6gf9l\" (UID: \"db4c2bcc-14d3-4129-89d9-e25d6c01ef02\") " pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.738337 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/db4c2bcc-14d3-4129-89d9-e25d6c01ef02-combined-ca-bundle\") pod \"barbican-api-6f55bbcd96-6gf9l\" (UID: \"db4c2bcc-14d3-4129-89d9-e25d6c01ef02\") " pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.739169 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/db4c2bcc-14d3-4129-89d9-e25d6c01ef02-config-data\") pod \"barbican-api-6f55bbcd96-6gf9l\" (UID: \"db4c2bcc-14d3-4129-89d9-e25d6c01ef02\") " pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.739742 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/db4c2bcc-14d3-4129-89d9-e25d6c01ef02-public-tls-certs\") pod \"barbican-api-6f55bbcd96-6gf9l\" (UID: \"db4c2bcc-14d3-4129-89d9-e25d6c01ef02\") " pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.758191 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r54hv\" (UniqueName: \"kubernetes.io/projected/db4c2bcc-14d3-4129-89d9-e25d6c01ef02-kube-api-access-r54hv\") pod \"barbican-api-6f55bbcd96-6gf9l\" (UID: \"db4c2bcc-14d3-4129-89d9-e25d6c01ef02\") " pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.841640 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.947041 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"47a3fd04-4de6-4517-b713-0dc980538bb7","Type":"ContainerStarted","Data":"276be87d32d91137448e5a3c4aa78b74393f79d68971a4021a0ad4fdb894eb40"} Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.947100 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="47a3fd04-4de6-4517-b713-0dc980538bb7" containerName="cinder-api-log" containerID="cri-o://049102bbb54ab2947365cf2fbc619e2ad284ccdcc0b950dc18f3f12f921c9733" gracePeriod=30 Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.947258 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="47a3fd04-4de6-4517-b713-0dc980538bb7" containerName="cinder-api" containerID="cri-o://276be87d32d91137448e5a3c4aa78b74393f79d68971a4021a0ad4fdb894eb40" gracePeriod=30 Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.947516 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 05 01:33:01 crc kubenswrapper[4665]: I1205 01:33:01.980243 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=6.980222622 podStartE2EDuration="6.980222622s" podCreationTimestamp="2025-12-05 01:32:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:33:01.973214134 +0000 UTC m=+1357.312606463" watchObservedRunningTime="2025-12-05 01:33:01.980222622 +0000 UTC m=+1357.319614921" Dec 05 01:33:02 crc kubenswrapper[4665]: I1205 01:33:02.343748 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-86cd4c9876-glfvx" podUID="2178a916-adc5-4ff5-8972-30b105320f5f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.148:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.148:8443: connect: connection refused" Dec 05 01:33:02 crc kubenswrapper[4665]: I1205 01:33:02.343819 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:33:02 crc kubenswrapper[4665]: I1205 01:33:02.344849 4665 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"5fee73ab6bf34e37ceb73d73cdc466ea8680df6b74400f1cd845b17c4378965f"} pod="openstack/horizon-86cd4c9876-glfvx" containerMessage="Container horizon failed startup probe, will be restarted" Dec 05 01:33:02 crc kubenswrapper[4665]: I1205 01:33:02.344892 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-86cd4c9876-glfvx" podUID="2178a916-adc5-4ff5-8972-30b105320f5f" containerName="horizon" containerID="cri-o://5fee73ab6bf34e37ceb73d73cdc466ea8680df6b74400f1cd845b17c4378965f" gracePeriod=30 Dec 05 01:33:02 crc kubenswrapper[4665]: I1205 01:33:02.485053 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-644f785f4-mslbg" podUID="64407a72-3fdf-450f-b5c0-913ee74bb437" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.149:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.149:8443: connect: connection refused" Dec 05 01:33:02 crc kubenswrapper[4665]: I1205 01:33:02.485381 4665 kubelet.go:2542] "SyncLoop 
(probe)" probe="startup" status="unhealthy" pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:33:02 crc kubenswrapper[4665]: I1205 01:33:02.486060 4665 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="horizon" containerStatusID={"Type":"cri-o","ID":"0d22edabd62a01b81060d36b7b04935aa12a247013b11f7437c9d25fbca25bbb"} pod="openstack/horizon-644f785f4-mslbg" containerMessage="Container horizon failed startup probe, will be restarted" Dec 05 01:33:02 crc kubenswrapper[4665]: I1205 01:33:02.486109 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-644f785f4-mslbg" podUID="64407a72-3fdf-450f-b5c0-913ee74bb437" containerName="horizon" containerID="cri-o://0d22edabd62a01b81060d36b7b04935aa12a247013b11f7437c9d25fbca25bbb" gracePeriod=30 Dec 05 01:33:02 crc kubenswrapper[4665]: I1205 01:33:02.814205 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-6f55bbcd96-6gf9l"] Dec 05 01:33:02 crc kubenswrapper[4665]: I1205 01:33:02.843849 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:33:02 crc kubenswrapper[4665]: W1205 01:33:02.862038 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7d94611a_4825_4614_a16f_a206f227201c.slice/crio-f36b8314d065e504613ed5b278190bf0c4287604ad5b63c86e47d6ed883d266f WatchSource:0}: Error finding container f36b8314d065e504613ed5b278190bf0c4287604ad5b63c86e47d6ed883d266f: Status 404 returned error can't find the container with id f36b8314d065e504613ed5b278190bf0c4287604ad5b63c86e47d6ed883d266f Dec 05 01:33:02 crc kubenswrapper[4665]: I1205 01:33:02.923668 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6b995216-a8c4-418c-9a82-eff79ca5360c" path="/var/lib/kubelet/pods/6b995216-a8c4-418c-9a82-eff79ca5360c/volumes" Dec 05 01:33:02 crc kubenswrapper[4665]: I1205 01:33:02.971795 4665 generic.go:334] "Generic (PLEG): container finished" podID="47a3fd04-4de6-4517-b713-0dc980538bb7" containerID="049102bbb54ab2947365cf2fbc619e2ad284ccdcc0b950dc18f3f12f921c9733" exitCode=143 Dec 05 01:33:02 crc kubenswrapper[4665]: I1205 01:33:02.971865 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"47a3fd04-4de6-4517-b713-0dc980538bb7","Type":"ContainerDied","Data":"049102bbb54ab2947365cf2fbc619e2ad284ccdcc0b950dc18f3f12f921c9733"} Dec 05 01:33:02 crc kubenswrapper[4665]: I1205 01:33:02.978529 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7d94611a-4825-4614-a16f-a206f227201c","Type":"ContainerStarted","Data":"f36b8314d065e504613ed5b278190bf0c4287604ad5b63c86e47d6ed883d266f"} Dec 05 01:33:02 crc kubenswrapper[4665]: I1205 01:33:02.980090 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6f55bbcd96-6gf9l" event={"ID":"db4c2bcc-14d3-4129-89d9-e25d6c01ef02","Type":"ContainerStarted","Data":"e542800a0fb3610b67b9ad7ec21c887a232d5e88db32b34acb409a2cdfb0d8fc"} Dec 05 01:33:03 crc kubenswrapper[4665]: I1205 01:33:03.995584 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-6f55bbcd96-6gf9l" event={"ID":"db4c2bcc-14d3-4129-89d9-e25d6c01ef02","Type":"ContainerStarted","Data":"401eac17f2a73900e52b1d270864e17e40450a901c2103bd5386dbb0ed3a4d67"} Dec 05 01:33:03 crc kubenswrapper[4665]: I1205 01:33:03.996098 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/barbican-api-6f55bbcd96-6gf9l" event={"ID":"db4c2bcc-14d3-4129-89d9-e25d6c01ef02","Type":"ContainerStarted","Data":"f76baa2b15e2b1210a9d2305779969e20c14654bd10edb39ba833f3b9a811aeb"} Dec 05 01:33:03 crc kubenswrapper[4665]: I1205 01:33:03.996444 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:03 crc kubenswrapper[4665]: I1205 01:33:03.996482 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:04 crc kubenswrapper[4665]: I1205 01:33:04.004564 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-d6596b4bb-7zqjr" event={"ID":"6cf44f27-5007-4db9-8784-715bdef486a0","Type":"ContainerStarted","Data":"c6d5375499e72d447a78a78390626f3ca3171e98438b7656f86fc6bc9cf85213"} Dec 05 01:33:04 crc kubenswrapper[4665]: I1205 01:33:04.004609 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-d6596b4bb-7zqjr" event={"ID":"6cf44f27-5007-4db9-8784-715bdef486a0","Type":"ContainerStarted","Data":"805249491b5fb4f5ff8578a9811c81f0fc9d5800efb783ed9f5f36d52f2319c6"} Dec 05 01:33:04 crc kubenswrapper[4665]: I1205 01:33:04.022774 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5bf7b484d7-782wq" event={"ID":"9dd95469-e581-46aa-bbb2-c69214aa26c7","Type":"ContainerStarted","Data":"33ad2257e91ad2932fe14f85cccf3f3695ca99dab4006f3a3777269a5fec81b6"} Dec 05 01:33:04 crc kubenswrapper[4665]: I1205 01:33:04.022820 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-5bf7b484d7-782wq" event={"ID":"9dd95469-e581-46aa-bbb2-c69214aa26c7","Type":"ContainerStarted","Data":"3ed3f1f8fe2a1e053e4f6f65d95d0fed723a90190948bc47ebc04e4f294322f1"} Dec 05 01:33:04 crc kubenswrapper[4665]: I1205 01:33:04.023108 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-6f55bbcd96-6gf9l" podStartSLOduration=3.02308954 podStartE2EDuration="3.02308954s" podCreationTimestamp="2025-12-05 01:33:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:33:04.017159948 +0000 UTC m=+1359.356552247" watchObservedRunningTime="2025-12-05 01:33:04.02308954 +0000 UTC m=+1359.362481839" Dec 05 01:33:04 crc kubenswrapper[4665]: I1205 01:33:04.044590 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32","Type":"ContainerStarted","Data":"bc2b3bf071dbd8c70d94a20cd5688adcbf948c0b03dde1e4d512678ff45c3179"} Dec 05 01:33:04 crc kubenswrapper[4665]: I1205 01:33:04.072651 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-d6596b4bb-7zqjr" podStartSLOduration=6.595554509 podStartE2EDuration="11.07263326s" podCreationTimestamp="2025-12-05 01:32:53 +0000 UTC" firstStartedPulling="2025-12-05 01:32:57.751698829 +0000 UTC m=+1353.091091128" lastFinishedPulling="2025-12-05 01:33:02.22877758 +0000 UTC m=+1357.568169879" observedRunningTime="2025-12-05 01:33:04.036309118 +0000 UTC m=+1359.375701417" watchObservedRunningTime="2025-12-05 01:33:04.07263326 +0000 UTC m=+1359.412025559" Dec 05 01:33:04 crc kubenswrapper[4665]: I1205 01:33:04.091716 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-5bf7b484d7-782wq" 
podStartSLOduration=6.390444704 podStartE2EDuration="11.091698207s" podCreationTimestamp="2025-12-05 01:32:53 +0000 UTC" firstStartedPulling="2025-12-05 01:32:57.510869737 +0000 UTC m=+1352.850262036" lastFinishedPulling="2025-12-05 01:33:02.21212324 +0000 UTC m=+1357.551515539" observedRunningTime="2025-12-05 01:33:04.059147406 +0000 UTC m=+1359.398539705" watchObservedRunningTime="2025-12-05 01:33:04.091698207 +0000 UTC m=+1359.431090506" Dec 05 01:33:04 crc kubenswrapper[4665]: I1205 01:33:04.113413 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=7.949495913 podStartE2EDuration="9.113389228s" podCreationTimestamp="2025-12-05 01:32:55 +0000 UTC" firstStartedPulling="2025-12-05 01:32:57.726488884 +0000 UTC m=+1353.065881183" lastFinishedPulling="2025-12-05 01:32:58.890382199 +0000 UTC m=+1354.229774498" observedRunningTime="2025-12-05 01:33:04.082901566 +0000 UTC m=+1359.422293865" watchObservedRunningTime="2025-12-05 01:33:04.113389228 +0000 UTC m=+1359.452781527" Dec 05 01:33:05 crc kubenswrapper[4665]: I1205 01:33:05.059037 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7d94611a-4825-4614-a16f-a206f227201c","Type":"ContainerStarted","Data":"4253b83787ee6cf2aa5df02f2ace19f5163a5db4a74f768443748deab80aeef4"} Dec 05 01:33:05 crc kubenswrapper[4665]: I1205 01:33:05.060337 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7d94611a-4825-4614-a16f-a206f227201c","Type":"ContainerStarted","Data":"6d770426408c843cb273489cd8ca1dd8c5008e14fdd24275653525c976732f62"} Dec 05 01:33:05 crc kubenswrapper[4665]: I1205 01:33:05.626131 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" Dec 05 01:33:05 crc kubenswrapper[4665]: I1205 01:33:05.715840 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 05 01:33:05 crc kubenswrapper[4665]: I1205 01:33:05.723351 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-444px"] Dec 05 01:33:05 crc kubenswrapper[4665]: I1205 01:33:05.723862 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-55f844cf75-444px" podUID="b14532d1-bdd2-4576-8d6a-b94c7d3fc137" containerName="dnsmasq-dns" containerID="cri-o://f513d9636eeced8ea3fe6a12e8c145314d7fb5360a09a4cb66f7bc867f2dcc80" gracePeriod=10 Dec 05 01:33:05 crc kubenswrapper[4665]: I1205 01:33:05.739591 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/cinder-scheduler-0" podUID="ffeb9e18-12ca-4602-a4d0-a57eb38b5e32" containerName="cinder-scheduler" probeResult="failure" output="Get \"http://10.217.0.162:8080/\": dial tcp 10.217.0.162:8080: connect: connection refused" Dec 05 01:33:05 crc kubenswrapper[4665]: I1205 01:33:05.744315 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7c5886d9b4-c8rm2" Dec 05 01:33:05 crc kubenswrapper[4665]: I1205 01:33:05.866009 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5c85\" (UniqueName: \"kubernetes.io/projected/f3c1e917-8b40-470c-88b8-5fa1a9c37665-kube-api-access-k5c85\") pod \"f3c1e917-8b40-470c-88b8-5fa1a9c37665\" (UID: \"f3c1e917-8b40-470c-88b8-5fa1a9c37665\") " Dec 05 01:33:05 crc kubenswrapper[4665]: I1205 01:33:05.866455 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/f3c1e917-8b40-470c-88b8-5fa1a9c37665-config\") pod \"f3c1e917-8b40-470c-88b8-5fa1a9c37665\" (UID: \"f3c1e917-8b40-470c-88b8-5fa1a9c37665\") " Dec 05 01:33:05 crc kubenswrapper[4665]: I1205 01:33:05.866525 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f3c1e917-8b40-470c-88b8-5fa1a9c37665-httpd-config\") pod \"f3c1e917-8b40-470c-88b8-5fa1a9c37665\" (UID: \"f3c1e917-8b40-470c-88b8-5fa1a9c37665\") " Dec 05 01:33:05 crc kubenswrapper[4665]: I1205 01:33:05.866571 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3c1e917-8b40-470c-88b8-5fa1a9c37665-combined-ca-bundle\") pod \"f3c1e917-8b40-470c-88b8-5fa1a9c37665\" (UID: \"f3c1e917-8b40-470c-88b8-5fa1a9c37665\") " Dec 05 01:33:05 crc kubenswrapper[4665]: I1205 01:33:05.866592 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f3c1e917-8b40-470c-88b8-5fa1a9c37665-ovndb-tls-certs\") pod \"f3c1e917-8b40-470c-88b8-5fa1a9c37665\" (UID: \"f3c1e917-8b40-470c-88b8-5fa1a9c37665\") " Dec 05 01:33:05 crc kubenswrapper[4665]: I1205 01:33:05.892674 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3c1e917-8b40-470c-88b8-5fa1a9c37665-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "f3c1e917-8b40-470c-88b8-5fa1a9c37665" (UID: "f3c1e917-8b40-470c-88b8-5fa1a9c37665"). InnerVolumeSpecName "httpd-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:05 crc kubenswrapper[4665]: I1205 01:33:05.921623 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3c1e917-8b40-470c-88b8-5fa1a9c37665-kube-api-access-k5c85" (OuterVolumeSpecName: "kube-api-access-k5c85") pod "f3c1e917-8b40-470c-88b8-5fa1a9c37665" (UID: "f3c1e917-8b40-470c-88b8-5fa1a9c37665"). InnerVolumeSpecName "kube-api-access-k5c85". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:33:05 crc kubenswrapper[4665]: I1205 01:33:05.984944 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5c85\" (UniqueName: \"kubernetes.io/projected/f3c1e917-8b40-470c-88b8-5fa1a9c37665-kube-api-access-k5c85\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:05 crc kubenswrapper[4665]: I1205 01:33:05.984978 4665 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/f3c1e917-8b40-470c-88b8-5fa1a9c37665-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.096908 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3c1e917-8b40-470c-88b8-5fa1a9c37665-config" (OuterVolumeSpecName: "config") pod "f3c1e917-8b40-470c-88b8-5fa1a9c37665" (UID: "f3c1e917-8b40-470c-88b8-5fa1a9c37665"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.098542 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/f3c1e917-8b40-470c-88b8-5fa1a9c37665-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.125499 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3c1e917-8b40-470c-88b8-5fa1a9c37665-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f3c1e917-8b40-470c-88b8-5fa1a9c37665" (UID: "f3c1e917-8b40-470c-88b8-5fa1a9c37665"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.128070 4665 generic.go:334] "Generic (PLEG): container finished" podID="f3c1e917-8b40-470c-88b8-5fa1a9c37665" containerID="be4047119aa4bef93cd3090e7d99350f73d834475fa53d2a7b7504c6fb2866dd" exitCode=0 Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.128161 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c5886d9b4-c8rm2" event={"ID":"f3c1e917-8b40-470c-88b8-5fa1a9c37665","Type":"ContainerDied","Data":"be4047119aa4bef93cd3090e7d99350f73d834475fa53d2a7b7504c6fb2866dd"} Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.128187 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-7c5886d9b4-c8rm2" event={"ID":"f3c1e917-8b40-470c-88b8-5fa1a9c37665","Type":"ContainerDied","Data":"82e5049ccfae1ab5128c022bbe2e3ac51264511bcf0deadac50003bc1a6870d3"} Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.128204 4665 scope.go:117] "RemoveContainer" containerID="bd05f6b48e43050e377d15a2fcb042f47a538daec1f0710625751ca71e3f29ef" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.128405 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-7c5886d9b4-c8rm2" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.154643 4665 generic.go:334] "Generic (PLEG): container finished" podID="b14532d1-bdd2-4576-8d6a-b94c7d3fc137" containerID="f513d9636eeced8ea3fe6a12e8c145314d7fb5360a09a4cb66f7bc867f2dcc80" exitCode=0 Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.155737 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-444px" event={"ID":"b14532d1-bdd2-4576-8d6a-b94c7d3fc137","Type":"ContainerDied","Data":"f513d9636eeced8ea3fe6a12e8c145314d7fb5360a09a4cb66f7bc867f2dcc80"} Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.202257 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3c1e917-8b40-470c-88b8-5fa1a9c37665-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.210504 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3c1e917-8b40-470c-88b8-5fa1a9c37665-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "f3c1e917-8b40-470c-88b8-5fa1a9c37665" (UID: "f3c1e917-8b40-470c-88b8-5fa1a9c37665"). InnerVolumeSpecName "ovndb-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.244269 4665 scope.go:117] "RemoveContainer" containerID="be4047119aa4bef93cd3090e7d99350f73d834475fa53d2a7b7504c6fb2866dd" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.306255 4665 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/f3c1e917-8b40-470c-88b8-5fa1a9c37665-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.335329 4665 scope.go:117] "RemoveContainer" containerID="bd05f6b48e43050e377d15a2fcb042f47a538daec1f0710625751ca71e3f29ef" Dec 05 01:33:06 crc kubenswrapper[4665]: E1205 01:33:06.348579 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd05f6b48e43050e377d15a2fcb042f47a538daec1f0710625751ca71e3f29ef\": container with ID starting with bd05f6b48e43050e377d15a2fcb042f47a538daec1f0710625751ca71e3f29ef not found: ID does not exist" containerID="bd05f6b48e43050e377d15a2fcb042f47a538daec1f0710625751ca71e3f29ef" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.348629 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd05f6b48e43050e377d15a2fcb042f47a538daec1f0710625751ca71e3f29ef"} err="failed to get container status \"bd05f6b48e43050e377d15a2fcb042f47a538daec1f0710625751ca71e3f29ef\": rpc error: code = NotFound desc = could not find container \"bd05f6b48e43050e377d15a2fcb042f47a538daec1f0710625751ca71e3f29ef\": container with ID starting with bd05f6b48e43050e377d15a2fcb042f47a538daec1f0710625751ca71e3f29ef not found: ID does not exist" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.348657 4665 scope.go:117] "RemoveContainer" containerID="be4047119aa4bef93cd3090e7d99350f73d834475fa53d2a7b7504c6fb2866dd" Dec 05 01:33:06 crc kubenswrapper[4665]: E1205 01:33:06.350699 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be4047119aa4bef93cd3090e7d99350f73d834475fa53d2a7b7504c6fb2866dd\": container with ID starting with 
be4047119aa4bef93cd3090e7d99350f73d834475fa53d2a7b7504c6fb2866dd not found: ID does not exist" containerID="be4047119aa4bef93cd3090e7d99350f73d834475fa53d2a7b7504c6fb2866dd" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.350732 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be4047119aa4bef93cd3090e7d99350f73d834475fa53d2a7b7504c6fb2866dd"} err="failed to get container status \"be4047119aa4bef93cd3090e7d99350f73d834475fa53d2a7b7504c6fb2866dd\": rpc error: code = NotFound desc = could not find container \"be4047119aa4bef93cd3090e7d99350f73d834475fa53d2a7b7504c6fb2866dd\": container with ID starting with be4047119aa4bef93cd3090e7d99350f73d834475fa53d2a7b7504c6fb2866dd not found: ID does not exist" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.494331 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7c5886d9b4-c8rm2"] Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.501556 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7c5886d9b4-c8rm2"] Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.533452 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-444px" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.610863 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-dns-swift-storage-0\") pod \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\" (UID: \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\") " Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.610908 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nnbzn\" (UniqueName: \"kubernetes.io/projected/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-kube-api-access-nnbzn\") pod \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\" (UID: \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\") " Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.610930 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-config\") pod \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\" (UID: \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\") " Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.610948 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-dns-svc\") pod \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\" (UID: \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\") " Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.610969 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-ovsdbserver-sb\") pod \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\" (UID: \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\") " Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.611062 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-ovsdbserver-nb\") pod \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\" (UID: \"b14532d1-bdd2-4576-8d6a-b94c7d3fc137\") " Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.639535 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-kube-api-access-nnbzn" (OuterVolumeSpecName: "kube-api-access-nnbzn") pod "b14532d1-bdd2-4576-8d6a-b94c7d3fc137" (UID: "b14532d1-bdd2-4576-8d6a-b94c7d3fc137"). InnerVolumeSpecName "kube-api-access-nnbzn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.703027 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "b14532d1-bdd2-4576-8d6a-b94c7d3fc137" (UID: "b14532d1-bdd2-4576-8d6a-b94c7d3fc137"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.714839 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nnbzn\" (UniqueName: \"kubernetes.io/projected/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-kube-api-access-nnbzn\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.714878 4665 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.725098 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "b14532d1-bdd2-4576-8d6a-b94c7d3fc137" (UID: "b14532d1-bdd2-4576-8d6a-b94c7d3fc137"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.726753 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-config" (OuterVolumeSpecName: "config") pod "b14532d1-bdd2-4576-8d6a-b94c7d3fc137" (UID: "b14532d1-bdd2-4576-8d6a-b94c7d3fc137"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.736701 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "b14532d1-bdd2-4576-8d6a-b94c7d3fc137" (UID: "b14532d1-bdd2-4576-8d6a-b94c7d3fc137"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.803092 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "b14532d1-bdd2-4576-8d6a-b94c7d3fc137" (UID: "b14532d1-bdd2-4576-8d6a-b94c7d3fc137"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.817433 4665 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.817465 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.817474 4665 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.817503 4665 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/b14532d1-bdd2-4576-8d6a-b94c7d3fc137-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:06 crc kubenswrapper[4665]: I1205 01:33:06.903844 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3c1e917-8b40-470c-88b8-5fa1a9c37665" path="/var/lib/kubelet/pods/f3c1e917-8b40-470c-88b8-5fa1a9c37665/volumes" Dec 05 01:33:07 crc kubenswrapper[4665]: I1205 01:33:07.171647 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-55f844cf75-444px" Dec 05 01:33:07 crc kubenswrapper[4665]: I1205 01:33:07.171635 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-55f844cf75-444px" event={"ID":"b14532d1-bdd2-4576-8d6a-b94c7d3fc137","Type":"ContainerDied","Data":"66e1e7245a47e4bb63fab24717a3fd5977e20a8fafc6026ed4ccd1a93dc95689"} Dec 05 01:33:07 crc kubenswrapper[4665]: I1205 01:33:07.172082 4665 scope.go:117] "RemoveContainer" containerID="f513d9636eeced8ea3fe6a12e8c145314d7fb5360a09a4cb66f7bc867f2dcc80" Dec 05 01:33:07 crc kubenswrapper[4665]: I1205 01:33:07.183859 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7d94611a-4825-4614-a16f-a206f227201c","Type":"ContainerStarted","Data":"896fb5780a68d84fbb725d68bd752d3285994fc2f9ce0db24015809f860863da"} Dec 05 01:33:07 crc kubenswrapper[4665]: I1205 01:33:07.208792 4665 scope.go:117] "RemoveContainer" containerID="8258ff95ffefd31863b1177430013f69d3c657acedb8f275d783bcd27f324e88" Dec 05 01:33:07 crc kubenswrapper[4665]: I1205 01:33:07.213821 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-444px"] Dec 05 01:33:07 crc kubenswrapper[4665]: I1205 01:33:07.224680 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-55f844cf75-444px"] Dec 05 01:33:07 crc kubenswrapper[4665]: I1205 01:33:07.827838 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:33:07 crc kubenswrapper[4665]: I1205 01:33:07.833587 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-7c79d9c44-5ps46" Dec 05 01:33:08 crc kubenswrapper[4665]: I1205 01:33:08.199657 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7d94611a-4825-4614-a16f-a206f227201c","Type":"ContainerStarted","Data":"4457c6d5a79f93197f085d4a4c79bb01591fd03c6212a658e7c59d9bd3b52acc"} Dec 05 01:33:08 crc kubenswrapper[4665]: I1205 
01:33:08.200901 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 01:33:08 crc kubenswrapper[4665]: I1205 01:33:08.231691 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.815724625 podStartE2EDuration="7.231674409s" podCreationTimestamp="2025-12-05 01:33:01 +0000 UTC" firstStartedPulling="2025-12-05 01:33:02.884936583 +0000 UTC m=+1358.224328882" lastFinishedPulling="2025-12-05 01:33:07.300886367 +0000 UTC m=+1362.640278666" observedRunningTime="2025-12-05 01:33:08.223663305 +0000 UTC m=+1363.563055604" watchObservedRunningTime="2025-12-05 01:33:08.231674409 +0000 UTC m=+1363.571066708" Dec 05 01:33:08 crc kubenswrapper[4665]: I1205 01:33:08.661170 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7ddc564cd6-nc8vp" Dec 05 01:33:08 crc kubenswrapper[4665]: I1205 01:33:08.822238 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-7ddc564cd6-nc8vp" Dec 05 01:33:08 crc kubenswrapper[4665]: I1205 01:33:08.926714 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b14532d1-bdd2-4576-8d6a-b94c7d3fc137" path="/var/lib/kubelet/pods/b14532d1-bdd2-4576-8d6a-b94c7d3fc137/volumes" Dec 05 01:33:09 crc kubenswrapper[4665]: I1205 01:33:09.851777 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 05 01:33:10 crc kubenswrapper[4665]: I1205 01:33:10.949844 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 05 01:33:11 crc kubenswrapper[4665]: I1205 01:33:11.011960 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 01:33:11 crc kubenswrapper[4665]: I1205 01:33:11.227063 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="ffeb9e18-12ca-4602-a4d0-a57eb38b5e32" containerName="cinder-scheduler" containerID="cri-o://cba11c36bf9292c821610b56b3254680622f9777228bc073eab1aca46cbbf022" gracePeriod=30 Dec 05 01:33:11 crc kubenswrapper[4665]: I1205 01:33:11.227619 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="ffeb9e18-12ca-4602-a4d0-a57eb38b5e32" containerName="probe" containerID="cri-o://bc2b3bf071dbd8c70d94a20cd5688adcbf948c0b03dde1e4d512678ff45c3179" gracePeriod=30 Dec 05 01:33:12 crc kubenswrapper[4665]: I1205 01:33:12.237084 4665 generic.go:334] "Generic (PLEG): container finished" podID="ffeb9e18-12ca-4602-a4d0-a57eb38b5e32" containerID="bc2b3bf071dbd8c70d94a20cd5688adcbf948c0b03dde1e4d512678ff45c3179" exitCode=0 Dec 05 01:33:12 crc kubenswrapper[4665]: I1205 01:33:12.237383 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32","Type":"ContainerDied","Data":"bc2b3bf071dbd8c70d94a20cd5688adcbf948c0b03dde1e4d512678ff45c3179"} Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.263568 4665 generic.go:334] "Generic (PLEG): container finished" podID="ffeb9e18-12ca-4602-a4d0-a57eb38b5e32" containerID="cba11c36bf9292c821610b56b3254680622f9777228bc073eab1aca46cbbf022" exitCode=0 Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.263814 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" 
event={"ID":"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32","Type":"ContainerDied","Data":"cba11c36bf9292c821610b56b3254680622f9777228bc073eab1aca46cbbf022"} Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.432827 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.557993 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.583228 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-scripts\") pod \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\" (UID: \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\") " Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.583396 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-etc-machine-id\") pod \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\" (UID: \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\") " Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.583467 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-config-data-custom\") pod \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\" (UID: \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\") " Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.583506 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58s5v\" (UniqueName: \"kubernetes.io/projected/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-kube-api-access-58s5v\") pod \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\" (UID: \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\") " Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.583529 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-combined-ca-bundle\") pod \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\" (UID: \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\") " Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.583520 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "ffeb9e18-12ca-4602-a4d0-a57eb38b5e32" (UID: "ffeb9e18-12ca-4602-a4d0-a57eb38b5e32"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.583573 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-config-data\") pod \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\" (UID: \"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32\") " Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.583991 4665 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.590561 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-kube-api-access-58s5v" (OuterVolumeSpecName: "kube-api-access-58s5v") pod "ffeb9e18-12ca-4602-a4d0-a57eb38b5e32" (UID: "ffeb9e18-12ca-4602-a4d0-a57eb38b5e32"). InnerVolumeSpecName "kube-api-access-58s5v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.604276 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-scripts" (OuterVolumeSpecName: "scripts") pod "ffeb9e18-12ca-4602-a4d0-a57eb38b5e32" (UID: "ffeb9e18-12ca-4602-a4d0-a57eb38b5e32"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.607452 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "ffeb9e18-12ca-4602-a4d0-a57eb38b5e32" (UID: "ffeb9e18-12ca-4602-a4d0-a57eb38b5e32"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.675947 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ffeb9e18-12ca-4602-a4d0-a57eb38b5e32" (UID: "ffeb9e18-12ca-4602-a4d0-a57eb38b5e32"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.686922 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58s5v\" (UniqueName: \"kubernetes.io/projected/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-kube-api-access-58s5v\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.687285 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.687319 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.687329 4665 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.742516 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-config-data" (OuterVolumeSpecName: "config-data") pod "ffeb9e18-12ca-4602-a4d0-a57eb38b5e32" (UID: "ffeb9e18-12ca-4602-a4d0-a57eb38b5e32"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.788751 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.888393 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-6f55bbcd96-6gf9l" Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.985021 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7ddc564cd6-nc8vp"] Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.985742 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7ddc564cd6-nc8vp" podUID="a657d0bd-fd03-44ca-b0a5-125e189f6a61" containerName="barbican-api" containerID="cri-o://3de0c157e5fece4288ac7e3f58f62ce2f1c195628949abbdb583e6f2a028880a" gracePeriod=30 Dec 05 01:33:13 crc kubenswrapper[4665]: I1205 01:33:13.985674 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-7ddc564cd6-nc8vp" podUID="a657d0bd-fd03-44ca-b0a5-125e189f6a61" containerName="barbican-api-log" containerID="cri-o://8de849a3df57fdb1079ffdbc4cbb72bcfb711ebc213d0812d13496defd3d0c68" gracePeriod=30 Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.289772 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"ffeb9e18-12ca-4602-a4d0-a57eb38b5e32","Type":"ContainerDied","Data":"f1d27a0cedff9c02a0e6d15f17c66b66745a290939751e6bc7a8fb933852a527"} Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.289822 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.289842 4665 scope.go:117] "RemoveContainer" containerID="bc2b3bf071dbd8c70d94a20cd5688adcbf948c0b03dde1e4d512678ff45c3179" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.329023 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.331575 4665 scope.go:117] "RemoveContainer" containerID="cba11c36bf9292c821610b56b3254680622f9777228bc073eab1aca46cbbf022" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.343417 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.363393 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 01:33:14 crc kubenswrapper[4665]: E1205 01:33:14.363745 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3c1e917-8b40-470c-88b8-5fa1a9c37665" containerName="neutron-api" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.363761 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3c1e917-8b40-470c-88b8-5fa1a9c37665" containerName="neutron-api" Dec 05 01:33:14 crc kubenswrapper[4665]: E1205 01:33:14.363777 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffeb9e18-12ca-4602-a4d0-a57eb38b5e32" containerName="probe" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.363784 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffeb9e18-12ca-4602-a4d0-a57eb38b5e32" containerName="probe" Dec 05 01:33:14 crc kubenswrapper[4665]: E1205 01:33:14.363797 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffeb9e18-12ca-4602-a4d0-a57eb38b5e32" containerName="cinder-scheduler" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.363805 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffeb9e18-12ca-4602-a4d0-a57eb38b5e32" containerName="cinder-scheduler" Dec 05 01:33:14 crc kubenswrapper[4665]: E1205 01:33:14.363819 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b14532d1-bdd2-4576-8d6a-b94c7d3fc137" containerName="init" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.363825 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="b14532d1-bdd2-4576-8d6a-b94c7d3fc137" containerName="init" Dec 05 01:33:14 crc kubenswrapper[4665]: E1205 01:33:14.363839 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3c1e917-8b40-470c-88b8-5fa1a9c37665" containerName="neutron-httpd" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.363845 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3c1e917-8b40-470c-88b8-5fa1a9c37665" containerName="neutron-httpd" Dec 05 01:33:14 crc kubenswrapper[4665]: E1205 01:33:14.363854 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b14532d1-bdd2-4576-8d6a-b94c7d3fc137" containerName="dnsmasq-dns" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.363860 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="b14532d1-bdd2-4576-8d6a-b94c7d3fc137" containerName="dnsmasq-dns" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.364051 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="b14532d1-bdd2-4576-8d6a-b94c7d3fc137" containerName="dnsmasq-dns" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.364076 4665 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="f3c1e917-8b40-470c-88b8-5fa1a9c37665" containerName="neutron-api" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.364090 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffeb9e18-12ca-4602-a4d0-a57eb38b5e32" containerName="probe" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.364102 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffeb9e18-12ca-4602-a4d0-a57eb38b5e32" containerName="cinder-scheduler" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.364114 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3c1e917-8b40-470c-88b8-5fa1a9c37665" containerName="neutron-httpd" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.365045 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.369563 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.402323 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d1a4169-4d66-47db-a16d-c3f77df4334a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"5d1a4169-4d66-47db-a16d-c3f77df4334a\") " pod="openstack/cinder-scheduler-0" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.402366 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5d1a4169-4d66-47db-a16d-c3f77df4334a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"5d1a4169-4d66-47db-a16d-c3f77df4334a\") " pod="openstack/cinder-scheduler-0" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.402387 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7csqh\" (UniqueName: \"kubernetes.io/projected/5d1a4169-4d66-47db-a16d-c3f77df4334a-kube-api-access-7csqh\") pod \"cinder-scheduler-0\" (UID: \"5d1a4169-4d66-47db-a16d-c3f77df4334a\") " pod="openstack/cinder-scheduler-0" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.402431 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d1a4169-4d66-47db-a16d-c3f77df4334a-scripts\") pod \"cinder-scheduler-0\" (UID: \"5d1a4169-4d66-47db-a16d-c3f77df4334a\") " pod="openstack/cinder-scheduler-0" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.402456 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5d1a4169-4d66-47db-a16d-c3f77df4334a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"5d1a4169-4d66-47db-a16d-c3f77df4334a\") " pod="openstack/cinder-scheduler-0" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.402628 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d1a4169-4d66-47db-a16d-c3f77df4334a-config-data\") pod \"cinder-scheduler-0\" (UID: \"5d1a4169-4d66-47db-a16d-c3f77df4334a\") " pod="openstack/cinder-scheduler-0" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.416727 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 
01:33:14.504780 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d1a4169-4d66-47db-a16d-c3f77df4334a-config-data\") pod \"cinder-scheduler-0\" (UID: \"5d1a4169-4d66-47db-a16d-c3f77df4334a\") " pod="openstack/cinder-scheduler-0" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.504873 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d1a4169-4d66-47db-a16d-c3f77df4334a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"5d1a4169-4d66-47db-a16d-c3f77df4334a\") " pod="openstack/cinder-scheduler-0" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.504903 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5d1a4169-4d66-47db-a16d-c3f77df4334a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"5d1a4169-4d66-47db-a16d-c3f77df4334a\") " pod="openstack/cinder-scheduler-0" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.504920 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7csqh\" (UniqueName: \"kubernetes.io/projected/5d1a4169-4d66-47db-a16d-c3f77df4334a-kube-api-access-7csqh\") pod \"cinder-scheduler-0\" (UID: \"5d1a4169-4d66-47db-a16d-c3f77df4334a\") " pod="openstack/cinder-scheduler-0" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.504951 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d1a4169-4d66-47db-a16d-c3f77df4334a-scripts\") pod \"cinder-scheduler-0\" (UID: \"5d1a4169-4d66-47db-a16d-c3f77df4334a\") " pod="openstack/cinder-scheduler-0" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.504969 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5d1a4169-4d66-47db-a16d-c3f77df4334a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"5d1a4169-4d66-47db-a16d-c3f77df4334a\") " pod="openstack/cinder-scheduler-0" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.505447 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/5d1a4169-4d66-47db-a16d-c3f77df4334a-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"5d1a4169-4d66-47db-a16d-c3f77df4334a\") " pod="openstack/cinder-scheduler-0" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.512697 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/5d1a4169-4d66-47db-a16d-c3f77df4334a-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"5d1a4169-4d66-47db-a16d-c3f77df4334a\") " pod="openstack/cinder-scheduler-0" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.513412 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5d1a4169-4d66-47db-a16d-c3f77df4334a-config-data\") pod \"cinder-scheduler-0\" (UID: \"5d1a4169-4d66-47db-a16d-c3f77df4334a\") " pod="openstack/cinder-scheduler-0" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.515157 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5d1a4169-4d66-47db-a16d-c3f77df4334a-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: 
\"5d1a4169-4d66-47db-a16d-c3f77df4334a\") " pod="openstack/cinder-scheduler-0" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.516612 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5d1a4169-4d66-47db-a16d-c3f77df4334a-scripts\") pod \"cinder-scheduler-0\" (UID: \"5d1a4169-4d66-47db-a16d-c3f77df4334a\") " pod="openstack/cinder-scheduler-0" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.528150 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7csqh\" (UniqueName: \"kubernetes.io/projected/5d1a4169-4d66-47db-a16d-c3f77df4334a-kube-api-access-7csqh\") pod \"cinder-scheduler-0\" (UID: \"5d1a4169-4d66-47db-a16d-c3f77df4334a\") " pod="openstack/cinder-scheduler-0" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.702106 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 01:33:14 crc kubenswrapper[4665]: I1205 01:33:14.919859 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffeb9e18-12ca-4602-a4d0-a57eb38b5e32" path="/var/lib/kubelet/pods/ffeb9e18-12ca-4602-a4d0-a57eb38b5e32/volumes" Dec 05 01:33:15 crc kubenswrapper[4665]: I1205 01:33:15.265268 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 01:33:15 crc kubenswrapper[4665]: I1205 01:33:15.342205 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7ddc564cd6-nc8vp" event={"ID":"a657d0bd-fd03-44ca-b0a5-125e189f6a61","Type":"ContainerDied","Data":"8de849a3df57fdb1079ffdbc4cbb72bcfb711ebc213d0812d13496defd3d0c68"} Dec 05 01:33:15 crc kubenswrapper[4665]: I1205 01:33:15.341664 4665 generic.go:334] "Generic (PLEG): container finished" podID="a657d0bd-fd03-44ca-b0a5-125e189f6a61" containerID="8de849a3df57fdb1079ffdbc4cbb72bcfb711ebc213d0812d13496defd3d0c68" exitCode=143 Dec 05 01:33:15 crc kubenswrapper[4665]: I1205 01:33:15.352385 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"5d1a4169-4d66-47db-a16d-c3f77df4334a","Type":"ContainerStarted","Data":"c24a5d96595289eb79637cf1a7dea8cadb51b4fba0a81d52b4c774dd77301140"} Dec 05 01:33:15 crc kubenswrapper[4665]: I1205 01:33:15.649352 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-79f86c7bd7-c4mss" Dec 05 01:33:16 crc kubenswrapper[4665]: I1205 01:33:16.366186 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"5d1a4169-4d66-47db-a16d-c3f77df4334a","Type":"ContainerStarted","Data":"e63f68b5fd6c1b0fae322201d25d2dab62f658a4ff2b20efefb3c7cfd7d0c64a"} Dec 05 01:33:17 crc kubenswrapper[4665]: I1205 01:33:17.379414 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"5d1a4169-4d66-47db-a16d-c3f77df4334a","Type":"ContainerStarted","Data":"b89f248de706c7a154232db3d35bc6ab0a966748af560720362c23ef7e7ab440"} Dec 05 01:33:17 crc kubenswrapper[4665]: I1205 01:33:17.404388 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.404364998 podStartE2EDuration="3.404364998s" podCreationTimestamp="2025-12-05 01:33:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:33:17.39903042 +0000 UTC m=+1372.738422719" watchObservedRunningTime="2025-12-05 
01:33:17.404364998 +0000 UTC m=+1372.743757297" Dec 05 01:33:17 crc kubenswrapper[4665]: I1205 01:33:17.538594 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7ddc564cd6-nc8vp" podUID="a657d0bd-fd03-44ca-b0a5-125e189f6a61" containerName="barbican-api-log" probeResult="failure" output="Get \"http://10.217.0.161:9311/healthcheck\": read tcp 10.217.0.2:44626->10.217.0.161:9311: read: connection reset by peer" Dec 05 01:33:17 crc kubenswrapper[4665]: I1205 01:33:17.539440 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/barbican-api-7ddc564cd6-nc8vp" podUID="a657d0bd-fd03-44ca-b0a5-125e189f6a61" containerName="barbican-api" probeResult="failure" output="Get \"http://10.217.0.161:9311/healthcheck\": read tcp 10.217.0.2:44640->10.217.0.161:9311: read: connection reset by peer" Dec 05 01:33:17 crc kubenswrapper[4665]: I1205 01:33:17.969005 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-7ddc564cd6-nc8vp" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.097868 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a657d0bd-fd03-44ca-b0a5-125e189f6a61-logs\") pod \"a657d0bd-fd03-44ca-b0a5-125e189f6a61\" (UID: \"a657d0bd-fd03-44ca-b0a5-125e189f6a61\") " Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.098124 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7k7vs\" (UniqueName: \"kubernetes.io/projected/a657d0bd-fd03-44ca-b0a5-125e189f6a61-kube-api-access-7k7vs\") pod \"a657d0bd-fd03-44ca-b0a5-125e189f6a61\" (UID: \"a657d0bd-fd03-44ca-b0a5-125e189f6a61\") " Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.098179 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a657d0bd-fd03-44ca-b0a5-125e189f6a61-combined-ca-bundle\") pod \"a657d0bd-fd03-44ca-b0a5-125e189f6a61\" (UID: \"a657d0bd-fd03-44ca-b0a5-125e189f6a61\") " Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.098204 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a657d0bd-fd03-44ca-b0a5-125e189f6a61-config-data-custom\") pod \"a657d0bd-fd03-44ca-b0a5-125e189f6a61\" (UID: \"a657d0bd-fd03-44ca-b0a5-125e189f6a61\") " Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.098247 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a657d0bd-fd03-44ca-b0a5-125e189f6a61-config-data\") pod \"a657d0bd-fd03-44ca-b0a5-125e189f6a61\" (UID: \"a657d0bd-fd03-44ca-b0a5-125e189f6a61\") " Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.099189 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a657d0bd-fd03-44ca-b0a5-125e189f6a61-logs" (OuterVolumeSpecName: "logs") pod "a657d0bd-fd03-44ca-b0a5-125e189f6a61" (UID: "a657d0bd-fd03-44ca-b0a5-125e189f6a61"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.103101 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a657d0bd-fd03-44ca-b0a5-125e189f6a61-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a657d0bd-fd03-44ca-b0a5-125e189f6a61" (UID: "a657d0bd-fd03-44ca-b0a5-125e189f6a61"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.110495 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a657d0bd-fd03-44ca-b0a5-125e189f6a61-kube-api-access-7k7vs" (OuterVolumeSpecName: "kube-api-access-7k7vs") pod "a657d0bd-fd03-44ca-b0a5-125e189f6a61" (UID: "a657d0bd-fd03-44ca-b0a5-125e189f6a61"). InnerVolumeSpecName "kube-api-access-7k7vs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.135663 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a657d0bd-fd03-44ca-b0a5-125e189f6a61-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "a657d0bd-fd03-44ca-b0a5-125e189f6a61" (UID: "a657d0bd-fd03-44ca-b0a5-125e189f6a61"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.153281 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a657d0bd-fd03-44ca-b0a5-125e189f6a61-config-data" (OuterVolumeSpecName: "config-data") pod "a657d0bd-fd03-44ca-b0a5-125e189f6a61" (UID: "a657d0bd-fd03-44ca-b0a5-125e189f6a61"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.200233 4665 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a657d0bd-fd03-44ca-b0a5-125e189f6a61-logs\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.200266 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7k7vs\" (UniqueName: \"kubernetes.io/projected/a657d0bd-fd03-44ca-b0a5-125e189f6a61-kube-api-access-7k7vs\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.200279 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a657d0bd-fd03-44ca-b0a5-125e189f6a61-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.200288 4665 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a657d0bd-fd03-44ca-b0a5-125e189f6a61-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.200312 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a657d0bd-fd03-44ca-b0a5-125e189f6a61-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.245154 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 05 01:33:18 crc kubenswrapper[4665]: E1205 01:33:18.245540 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a657d0bd-fd03-44ca-b0a5-125e189f6a61" containerName="barbican-api" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.245556 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="a657d0bd-fd03-44ca-b0a5-125e189f6a61" containerName="barbican-api" Dec 05 01:33:18 crc kubenswrapper[4665]: E1205 01:33:18.245596 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a657d0bd-fd03-44ca-b0a5-125e189f6a61" containerName="barbican-api-log" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.245602 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="a657d0bd-fd03-44ca-b0a5-125e189f6a61" containerName="barbican-api-log" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.245755 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="a657d0bd-fd03-44ca-b0a5-125e189f6a61" containerName="barbican-api" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.245774 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="a657d0bd-fd03-44ca-b0a5-125e189f6a61" containerName="barbican-api-log" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.246371 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.248651 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.248838 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-fjgbm" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.251688 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.269411 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.302018 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a8461ae3-f75f-42de-b320-c9dc4b1545ec-openstack-config\") pod \"openstackclient\" (UID: \"a8461ae3-f75f-42de-b320-c9dc4b1545ec\") " pod="openstack/openstackclient" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.302071 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wn6s\" (UniqueName: \"kubernetes.io/projected/a8461ae3-f75f-42de-b320-c9dc4b1545ec-kube-api-access-2wn6s\") pod \"openstackclient\" (UID: \"a8461ae3-f75f-42de-b320-c9dc4b1545ec\") " pod="openstack/openstackclient" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.302141 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a8461ae3-f75f-42de-b320-c9dc4b1545ec-openstack-config-secret\") pod \"openstackclient\" (UID: \"a8461ae3-f75f-42de-b320-c9dc4b1545ec\") " pod="openstack/openstackclient" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.302173 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8461ae3-f75f-42de-b320-c9dc4b1545ec-combined-ca-bundle\") pod \"openstackclient\" (UID: \"a8461ae3-f75f-42de-b320-c9dc4b1545ec\") " pod="openstack/openstackclient" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.388786 4665 generic.go:334] "Generic (PLEG): container finished" podID="a657d0bd-fd03-44ca-b0a5-125e189f6a61" containerID="3de0c157e5fece4288ac7e3f58f62ce2f1c195628949abbdb583e6f2a028880a" exitCode=0 Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.388872 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-7ddc564cd6-nc8vp" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.388875 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7ddc564cd6-nc8vp" event={"ID":"a657d0bd-fd03-44ca-b0a5-125e189f6a61","Type":"ContainerDied","Data":"3de0c157e5fece4288ac7e3f58f62ce2f1c195628949abbdb583e6f2a028880a"} Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.388921 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-7ddc564cd6-nc8vp" event={"ID":"a657d0bd-fd03-44ca-b0a5-125e189f6a61","Type":"ContainerDied","Data":"7a0d216f12f565c4723f207035b415a9a97d96586c69b15e4b59fbc963575a20"} Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.388938 4665 scope.go:117] "RemoveContainer" containerID="3de0c157e5fece4288ac7e3f58f62ce2f1c195628949abbdb583e6f2a028880a" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.403566 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a8461ae3-f75f-42de-b320-c9dc4b1545ec-openstack-config\") pod \"openstackclient\" (UID: \"a8461ae3-f75f-42de-b320-c9dc4b1545ec\") " pod="openstack/openstackclient" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.403611 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wn6s\" (UniqueName: \"kubernetes.io/projected/a8461ae3-f75f-42de-b320-c9dc4b1545ec-kube-api-access-2wn6s\") pod \"openstackclient\" (UID: \"a8461ae3-f75f-42de-b320-c9dc4b1545ec\") " pod="openstack/openstackclient" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.403697 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a8461ae3-f75f-42de-b320-c9dc4b1545ec-openstack-config-secret\") pod \"openstackclient\" (UID: \"a8461ae3-f75f-42de-b320-c9dc4b1545ec\") " pod="openstack/openstackclient" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.403726 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8461ae3-f75f-42de-b320-c9dc4b1545ec-combined-ca-bundle\") pod \"openstackclient\" (UID: \"a8461ae3-f75f-42de-b320-c9dc4b1545ec\") " pod="openstack/openstackclient" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.404406 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a8461ae3-f75f-42de-b320-c9dc4b1545ec-openstack-config\") pod \"openstackclient\" (UID: \"a8461ae3-f75f-42de-b320-c9dc4b1545ec\") " pod="openstack/openstackclient" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.409869 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a8461ae3-f75f-42de-b320-c9dc4b1545ec-combined-ca-bundle\") pod \"openstackclient\" (UID: \"a8461ae3-f75f-42de-b320-c9dc4b1545ec\") " pod="openstack/openstackclient" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.410744 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a8461ae3-f75f-42de-b320-c9dc4b1545ec-openstack-config-secret\") pod \"openstackclient\" (UID: \"a8461ae3-f75f-42de-b320-c9dc4b1545ec\") " pod="openstack/openstackclient" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.422695 4665 scope.go:117] "RemoveContainer" 
containerID="8de849a3df57fdb1079ffdbc4cbb72bcfb711ebc213d0812d13496defd3d0c68" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.425929 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wn6s\" (UniqueName: \"kubernetes.io/projected/a8461ae3-f75f-42de-b320-c9dc4b1545ec-kube-api-access-2wn6s\") pod \"openstackclient\" (UID: \"a8461ae3-f75f-42de-b320-c9dc4b1545ec\") " pod="openstack/openstackclient" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.429436 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-7ddc564cd6-nc8vp"] Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.437956 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-7ddc564cd6-nc8vp"] Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.443633 4665 scope.go:117] "RemoveContainer" containerID="3de0c157e5fece4288ac7e3f58f62ce2f1c195628949abbdb583e6f2a028880a" Dec 05 01:33:18 crc kubenswrapper[4665]: E1205 01:33:18.444065 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3de0c157e5fece4288ac7e3f58f62ce2f1c195628949abbdb583e6f2a028880a\": container with ID starting with 3de0c157e5fece4288ac7e3f58f62ce2f1c195628949abbdb583e6f2a028880a not found: ID does not exist" containerID="3de0c157e5fece4288ac7e3f58f62ce2f1c195628949abbdb583e6f2a028880a" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.444096 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3de0c157e5fece4288ac7e3f58f62ce2f1c195628949abbdb583e6f2a028880a"} err="failed to get container status \"3de0c157e5fece4288ac7e3f58f62ce2f1c195628949abbdb583e6f2a028880a\": rpc error: code = NotFound desc = could not find container \"3de0c157e5fece4288ac7e3f58f62ce2f1c195628949abbdb583e6f2a028880a\": container with ID starting with 3de0c157e5fece4288ac7e3f58f62ce2f1c195628949abbdb583e6f2a028880a not found: ID does not exist" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.444117 4665 scope.go:117] "RemoveContainer" containerID="8de849a3df57fdb1079ffdbc4cbb72bcfb711ebc213d0812d13496defd3d0c68" Dec 05 01:33:18 crc kubenswrapper[4665]: E1205 01:33:18.444450 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8de849a3df57fdb1079ffdbc4cbb72bcfb711ebc213d0812d13496defd3d0c68\": container with ID starting with 8de849a3df57fdb1079ffdbc4cbb72bcfb711ebc213d0812d13496defd3d0c68 not found: ID does not exist" containerID="8de849a3df57fdb1079ffdbc4cbb72bcfb711ebc213d0812d13496defd3d0c68" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.444471 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8de849a3df57fdb1079ffdbc4cbb72bcfb711ebc213d0812d13496defd3d0c68"} err="failed to get container status \"8de849a3df57fdb1079ffdbc4cbb72bcfb711ebc213d0812d13496defd3d0c68\": rpc error: code = NotFound desc = could not find container \"8de849a3df57fdb1079ffdbc4cbb72bcfb711ebc213d0812d13496defd3d0c68\": container with ID starting with 8de849a3df57fdb1079ffdbc4cbb72bcfb711ebc213d0812d13496defd3d0c68 not found: ID does not exist" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.576859 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 05 01:33:18 crc kubenswrapper[4665]: I1205 01:33:18.904474 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a657d0bd-fd03-44ca-b0a5-125e189f6a61" path="/var/lib/kubelet/pods/a657d0bd-fd03-44ca-b0a5-125e189f6a61/volumes" Dec 05 01:33:19 crc kubenswrapper[4665]: I1205 01:33:19.062503 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 01:33:19 crc kubenswrapper[4665]: I1205 01:33:19.413734 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"a8461ae3-f75f-42de-b320-c9dc4b1545ec","Type":"ContainerStarted","Data":"159d62c3920988f79892d6c5df9717fddaed7f9cf975cb7c312781f17b782985"} Dec 05 01:33:19 crc kubenswrapper[4665]: I1205 01:33:19.702520 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 05 01:33:24 crc kubenswrapper[4665]: I1205 01:33:24.959934 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.232356 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-5f4f69cd4c-jp87r"] Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.234609 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.238566 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.238750 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.238918 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.277502 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5f4f69cd4c-jp87r"] Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.328150 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5cf42108-5fbb-4bb1-b941-b2181f99fa5c-run-httpd\") pod \"swift-proxy-5f4f69cd4c-jp87r\" (UID: \"5cf42108-5fbb-4bb1-b941-b2181f99fa5c\") " pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.328213 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cf42108-5fbb-4bb1-b941-b2181f99fa5c-combined-ca-bundle\") pod \"swift-proxy-5f4f69cd4c-jp87r\" (UID: \"5cf42108-5fbb-4bb1-b941-b2181f99fa5c\") " pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.328256 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5cf42108-5fbb-4bb1-b941-b2181f99fa5c-etc-swift\") pod \"swift-proxy-5f4f69cd4c-jp87r\" (UID: \"5cf42108-5fbb-4bb1-b941-b2181f99fa5c\") " pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.328305 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8s52\" (UniqueName: 
\"kubernetes.io/projected/5cf42108-5fbb-4bb1-b941-b2181f99fa5c-kube-api-access-n8s52\") pod \"swift-proxy-5f4f69cd4c-jp87r\" (UID: \"5cf42108-5fbb-4bb1-b941-b2181f99fa5c\") " pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.328365 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cf42108-5fbb-4bb1-b941-b2181f99fa5c-config-data\") pod \"swift-proxy-5f4f69cd4c-jp87r\" (UID: \"5cf42108-5fbb-4bb1-b941-b2181f99fa5c\") " pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.328404 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cf42108-5fbb-4bb1-b941-b2181f99fa5c-internal-tls-certs\") pod \"swift-proxy-5f4f69cd4c-jp87r\" (UID: \"5cf42108-5fbb-4bb1-b941-b2181f99fa5c\") " pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.328456 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cf42108-5fbb-4bb1-b941-b2181f99fa5c-public-tls-certs\") pod \"swift-proxy-5f4f69cd4c-jp87r\" (UID: \"5cf42108-5fbb-4bb1-b941-b2181f99fa5c\") " pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.328500 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5cf42108-5fbb-4bb1-b941-b2181f99fa5c-log-httpd\") pod \"swift-proxy-5f4f69cd4c-jp87r\" (UID: \"5cf42108-5fbb-4bb1-b941-b2181f99fa5c\") " pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.430560 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5cf42108-5fbb-4bb1-b941-b2181f99fa5c-run-httpd\") pod \"swift-proxy-5f4f69cd4c-jp87r\" (UID: \"5cf42108-5fbb-4bb1-b941-b2181f99fa5c\") " pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.430961 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cf42108-5fbb-4bb1-b941-b2181f99fa5c-combined-ca-bundle\") pod \"swift-proxy-5f4f69cd4c-jp87r\" (UID: \"5cf42108-5fbb-4bb1-b941-b2181f99fa5c\") " pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.430969 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5cf42108-5fbb-4bb1-b941-b2181f99fa5c-run-httpd\") pod \"swift-proxy-5f4f69cd4c-jp87r\" (UID: \"5cf42108-5fbb-4bb1-b941-b2181f99fa5c\") " pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.431013 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5cf42108-5fbb-4bb1-b941-b2181f99fa5c-etc-swift\") pod \"swift-proxy-5f4f69cd4c-jp87r\" (UID: \"5cf42108-5fbb-4bb1-b941-b2181f99fa5c\") " pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.431105 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8s52\" (UniqueName: 
\"kubernetes.io/projected/5cf42108-5fbb-4bb1-b941-b2181f99fa5c-kube-api-access-n8s52\") pod \"swift-proxy-5f4f69cd4c-jp87r\" (UID: \"5cf42108-5fbb-4bb1-b941-b2181f99fa5c\") " pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.431162 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cf42108-5fbb-4bb1-b941-b2181f99fa5c-config-data\") pod \"swift-proxy-5f4f69cd4c-jp87r\" (UID: \"5cf42108-5fbb-4bb1-b941-b2181f99fa5c\") " pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.431196 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cf42108-5fbb-4bb1-b941-b2181f99fa5c-internal-tls-certs\") pod \"swift-proxy-5f4f69cd4c-jp87r\" (UID: \"5cf42108-5fbb-4bb1-b941-b2181f99fa5c\") " pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.431244 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cf42108-5fbb-4bb1-b941-b2181f99fa5c-public-tls-certs\") pod \"swift-proxy-5f4f69cd4c-jp87r\" (UID: \"5cf42108-5fbb-4bb1-b941-b2181f99fa5c\") " pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.431283 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5cf42108-5fbb-4bb1-b941-b2181f99fa5c-log-httpd\") pod \"swift-proxy-5f4f69cd4c-jp87r\" (UID: \"5cf42108-5fbb-4bb1-b941-b2181f99fa5c\") " pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.431552 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5cf42108-5fbb-4bb1-b941-b2181f99fa5c-log-httpd\") pod \"swift-proxy-5f4f69cd4c-jp87r\" (UID: \"5cf42108-5fbb-4bb1-b941-b2181f99fa5c\") " pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.440159 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5cf42108-5fbb-4bb1-b941-b2181f99fa5c-combined-ca-bundle\") pod \"swift-proxy-5f4f69cd4c-jp87r\" (UID: \"5cf42108-5fbb-4bb1-b941-b2181f99fa5c\") " pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.445951 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cf42108-5fbb-4bb1-b941-b2181f99fa5c-internal-tls-certs\") pod \"swift-proxy-5f4f69cd4c-jp87r\" (UID: \"5cf42108-5fbb-4bb1-b941-b2181f99fa5c\") " pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.446754 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/5cf42108-5fbb-4bb1-b941-b2181f99fa5c-etc-swift\") pod \"swift-proxy-5f4f69cd4c-jp87r\" (UID: \"5cf42108-5fbb-4bb1-b941-b2181f99fa5c\") " pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.447409 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5cf42108-5fbb-4bb1-b941-b2181f99fa5c-public-tls-certs\") pod 
\"swift-proxy-5f4f69cd4c-jp87r\" (UID: \"5cf42108-5fbb-4bb1-b941-b2181f99fa5c\") " pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.469388 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8s52\" (UniqueName: \"kubernetes.io/projected/5cf42108-5fbb-4bb1-b941-b2181f99fa5c-kube-api-access-n8s52\") pod \"swift-proxy-5f4f69cd4c-jp87r\" (UID: \"5cf42108-5fbb-4bb1-b941-b2181f99fa5c\") " pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.472919 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5cf42108-5fbb-4bb1-b941-b2181f99fa5c-config-data\") pod \"swift-proxy-5f4f69cd4c-jp87r\" (UID: \"5cf42108-5fbb-4bb1-b941-b2181f99fa5c\") " pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:25 crc kubenswrapper[4665]: I1205 01:33:25.579916 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:26 crc kubenswrapper[4665]: I1205 01:33:26.529788 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:33:26 crc kubenswrapper[4665]: I1205 01:33:26.534841 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7d94611a-4825-4614-a16f-a206f227201c" containerName="ceilometer-central-agent" containerID="cri-o://6d770426408c843cb273489cd8ca1dd8c5008e14fdd24275653525c976732f62" gracePeriod=30 Dec 05 01:33:26 crc kubenswrapper[4665]: I1205 01:33:26.535051 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7d94611a-4825-4614-a16f-a206f227201c" containerName="proxy-httpd" containerID="cri-o://4457c6d5a79f93197f085d4a4c79bb01591fd03c6212a658e7c59d9bd3b52acc" gracePeriod=30 Dec 05 01:33:26 crc kubenswrapper[4665]: I1205 01:33:26.535097 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7d94611a-4825-4614-a16f-a206f227201c" containerName="sg-core" containerID="cri-o://896fb5780a68d84fbb725d68bd752d3285994fc2f9ce0db24015809f860863da" gracePeriod=30 Dec 05 01:33:26 crc kubenswrapper[4665]: I1205 01:33:26.535131 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="7d94611a-4825-4614-a16f-a206f227201c" containerName="ceilometer-notification-agent" containerID="cri-o://4253b83787ee6cf2aa5df02f2ace19f5163a5db4a74f768443748deab80aeef4" gracePeriod=30 Dec 05 01:33:26 crc kubenswrapper[4665]: I1205 01:33:26.543880 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 01:33:27 crc kubenswrapper[4665]: I1205 01:33:27.547031 4665 generic.go:334] "Generic (PLEG): container finished" podID="7d94611a-4825-4614-a16f-a206f227201c" containerID="4457c6d5a79f93197f085d4a4c79bb01591fd03c6212a658e7c59d9bd3b52acc" exitCode=0 Dec 05 01:33:27 crc kubenswrapper[4665]: I1205 01:33:27.547058 4665 generic.go:334] "Generic (PLEG): container finished" podID="7d94611a-4825-4614-a16f-a206f227201c" containerID="896fb5780a68d84fbb725d68bd752d3285994fc2f9ce0db24015809f860863da" exitCode=2 Dec 05 01:33:27 crc kubenswrapper[4665]: I1205 01:33:27.547066 4665 generic.go:334] "Generic (PLEG): container finished" podID="7d94611a-4825-4614-a16f-a206f227201c" containerID="6d770426408c843cb273489cd8ca1dd8c5008e14fdd24275653525c976732f62" 
exitCode=0 Dec 05 01:33:27 crc kubenswrapper[4665]: I1205 01:33:27.547084 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7d94611a-4825-4614-a16f-a206f227201c","Type":"ContainerDied","Data":"4457c6d5a79f93197f085d4a4c79bb01591fd03c6212a658e7c59d9bd3b52acc"} Dec 05 01:33:27 crc kubenswrapper[4665]: I1205 01:33:27.547109 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7d94611a-4825-4614-a16f-a206f227201c","Type":"ContainerDied","Data":"896fb5780a68d84fbb725d68bd752d3285994fc2f9ce0db24015809f860863da"} Dec 05 01:33:27 crc kubenswrapper[4665]: I1205 01:33:27.547119 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7d94611a-4825-4614-a16f-a206f227201c","Type":"ContainerDied","Data":"6d770426408c843cb273489cd8ca1dd8c5008e14fdd24275653525c976732f62"} Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.224764 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-whn5c"] Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.226234 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-whn5c" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.238079 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-whn5c"] Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.336974 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-hctnc"] Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.338253 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-hctnc" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.350640 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-hctnc"] Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.387198 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/785a409c-4cf0-4d32-a459-576b739f4b4b-operator-scripts\") pod \"nova-api-db-create-whn5c\" (UID: \"785a409c-4cf0-4d32-a459-576b739f4b4b\") " pod="openstack/nova-api-db-create-whn5c" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.387445 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9ffr6\" (UniqueName: \"kubernetes.io/projected/785a409c-4cf0-4d32-a459-576b739f4b4b-kube-api-access-9ffr6\") pod \"nova-api-db-create-whn5c\" (UID: \"785a409c-4cf0-4d32-a459-576b739f4b4b\") " pod="openstack/nova-api-db-create-whn5c" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.428953 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-977b-account-create-update-d9c7b"] Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.430000 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-977b-account-create-update-d9c7b" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.434948 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.476342 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-977b-account-create-update-d9c7b"] Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.489440 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/42f0aba2-52cb-4679-8fac-bdd74b0f9f82-operator-scripts\") pod \"nova-cell0-db-create-hctnc\" (UID: \"42f0aba2-52cb-4679-8fac-bdd74b0f9f82\") " pod="openstack/nova-cell0-db-create-hctnc" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.489558 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s42bb\" (UniqueName: \"kubernetes.io/projected/42f0aba2-52cb-4679-8fac-bdd74b0f9f82-kube-api-access-s42bb\") pod \"nova-cell0-db-create-hctnc\" (UID: \"42f0aba2-52cb-4679-8fac-bdd74b0f9f82\") " pod="openstack/nova-cell0-db-create-hctnc" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.489631 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/785a409c-4cf0-4d32-a459-576b739f4b4b-operator-scripts\") pod \"nova-api-db-create-whn5c\" (UID: \"785a409c-4cf0-4d32-a459-576b739f4b4b\") " pod="openstack/nova-api-db-create-whn5c" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.489663 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9ffr6\" (UniqueName: \"kubernetes.io/projected/785a409c-4cf0-4d32-a459-576b739f4b4b-kube-api-access-9ffr6\") pod \"nova-api-db-create-whn5c\" (UID: \"785a409c-4cf0-4d32-a459-576b739f4b4b\") " pod="openstack/nova-api-db-create-whn5c" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.490871 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/785a409c-4cf0-4d32-a459-576b739f4b4b-operator-scripts\") pod \"nova-api-db-create-whn5c\" (UID: \"785a409c-4cf0-4d32-a459-576b739f4b4b\") " pod="openstack/nova-api-db-create-whn5c" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.493725 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-2blsh"] Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.502785 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-2blsh"] Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.502893 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-2blsh" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.518913 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9ffr6\" (UniqueName: \"kubernetes.io/projected/785a409c-4cf0-4d32-a459-576b739f4b4b-kube-api-access-9ffr6\") pod \"nova-api-db-create-whn5c\" (UID: \"785a409c-4cf0-4d32-a459-576b739f4b4b\") " pod="openstack/nova-api-db-create-whn5c" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.574876 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-whn5c" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.591252 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/42f0aba2-52cb-4679-8fac-bdd74b0f9f82-operator-scripts\") pod \"nova-cell0-db-create-hctnc\" (UID: \"42f0aba2-52cb-4679-8fac-bdd74b0f9f82\") " pod="openstack/nova-cell0-db-create-hctnc" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.591605 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q44xw\" (UniqueName: \"kubernetes.io/projected/e60d2002-ca42-44ac-ac85-cb8c412a601e-kube-api-access-q44xw\") pod \"nova-api-977b-account-create-update-d9c7b\" (UID: \"e60d2002-ca42-44ac-ac85-cb8c412a601e\") " pod="openstack/nova-api-977b-account-create-update-d9c7b" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.591691 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s42bb\" (UniqueName: \"kubernetes.io/projected/42f0aba2-52cb-4679-8fac-bdd74b0f9f82-kube-api-access-s42bb\") pod \"nova-cell0-db-create-hctnc\" (UID: \"42f0aba2-52cb-4679-8fac-bdd74b0f9f82\") " pod="openstack/nova-cell0-db-create-hctnc" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.591734 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e60d2002-ca42-44ac-ac85-cb8c412a601e-operator-scripts\") pod \"nova-api-977b-account-create-update-d9c7b\" (UID: \"e60d2002-ca42-44ac-ac85-cb8c412a601e\") " pod="openstack/nova-api-977b-account-create-update-d9c7b" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.592649 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/42f0aba2-52cb-4679-8fac-bdd74b0f9f82-operator-scripts\") pod \"nova-cell0-db-create-hctnc\" (UID: \"42f0aba2-52cb-4679-8fac-bdd74b0f9f82\") " pod="openstack/nova-cell0-db-create-hctnc" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.640933 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-1541-account-create-update-s9fdb"] Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.640974 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s42bb\" (UniqueName: \"kubernetes.io/projected/42f0aba2-52cb-4679-8fac-bdd74b0f9f82-kube-api-access-s42bb\") pod \"nova-cell0-db-create-hctnc\" (UID: \"42f0aba2-52cb-4679-8fac-bdd74b0f9f82\") " pod="openstack/nova-cell0-db-create-hctnc" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.646550 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-1541-account-create-update-s9fdb" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.649934 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.655896 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-1541-account-create-update-s9fdb"] Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.667731 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-hctnc" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.705119 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7vkk\" (UniqueName: \"kubernetes.io/projected/d75b13ee-e8d3-4714-ad14-2b2c9cf993f9-kube-api-access-q7vkk\") pod \"nova-cell1-db-create-2blsh\" (UID: \"d75b13ee-e8d3-4714-ad14-2b2c9cf993f9\") " pod="openstack/nova-cell1-db-create-2blsh" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.705187 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q44xw\" (UniqueName: \"kubernetes.io/projected/e60d2002-ca42-44ac-ac85-cb8c412a601e-kube-api-access-q44xw\") pod \"nova-api-977b-account-create-update-d9c7b\" (UID: \"e60d2002-ca42-44ac-ac85-cb8c412a601e\") " pod="openstack/nova-api-977b-account-create-update-d9c7b" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.705447 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d75b13ee-e8d3-4714-ad14-2b2c9cf993f9-operator-scripts\") pod \"nova-cell1-db-create-2blsh\" (UID: \"d75b13ee-e8d3-4714-ad14-2b2c9cf993f9\") " pod="openstack/nova-cell1-db-create-2blsh" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.705491 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e60d2002-ca42-44ac-ac85-cb8c412a601e-operator-scripts\") pod \"nova-api-977b-account-create-update-d9c7b\" (UID: \"e60d2002-ca42-44ac-ac85-cb8c412a601e\") " pod="openstack/nova-api-977b-account-create-update-d9c7b" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.706360 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e60d2002-ca42-44ac-ac85-cb8c412a601e-operator-scripts\") pod \"nova-api-977b-account-create-update-d9c7b\" (UID: \"e60d2002-ca42-44ac-ac85-cb8c412a601e\") " pod="openstack/nova-api-977b-account-create-update-d9c7b" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.724220 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q44xw\" (UniqueName: \"kubernetes.io/projected/e60d2002-ca42-44ac-ac85-cb8c412a601e-kube-api-access-q44xw\") pod \"nova-api-977b-account-create-update-d9c7b\" (UID: \"e60d2002-ca42-44ac-ac85-cb8c412a601e\") " pod="openstack/nova-api-977b-account-create-update-d9c7b" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.759218 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-977b-account-create-update-d9c7b" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.807275 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7vkk\" (UniqueName: \"kubernetes.io/projected/d75b13ee-e8d3-4714-ad14-2b2c9cf993f9-kube-api-access-q7vkk\") pod \"nova-cell1-db-create-2blsh\" (UID: \"d75b13ee-e8d3-4714-ad14-2b2c9cf993f9\") " pod="openstack/nova-cell1-db-create-2blsh" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.807397 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d75b13ee-e8d3-4714-ad14-2b2c9cf993f9-operator-scripts\") pod \"nova-cell1-db-create-2blsh\" (UID: \"d75b13ee-e8d3-4714-ad14-2b2c9cf993f9\") " pod="openstack/nova-cell1-db-create-2blsh" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.807467 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6623940a-3fe5-4bbc-a157-c88d2271609e-operator-scripts\") pod \"nova-cell0-1541-account-create-update-s9fdb\" (UID: \"6623940a-3fe5-4bbc-a157-c88d2271609e\") " pod="openstack/nova-cell0-1541-account-create-update-s9fdb" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.807517 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4g7ln\" (UniqueName: \"kubernetes.io/projected/6623940a-3fe5-4bbc-a157-c88d2271609e-kube-api-access-4g7ln\") pod \"nova-cell0-1541-account-create-update-s9fdb\" (UID: \"6623940a-3fe5-4bbc-a157-c88d2271609e\") " pod="openstack/nova-cell0-1541-account-create-update-s9fdb" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.808099 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d75b13ee-e8d3-4714-ad14-2b2c9cf993f9-operator-scripts\") pod \"nova-cell1-db-create-2blsh\" (UID: \"d75b13ee-e8d3-4714-ad14-2b2c9cf993f9\") " pod="openstack/nova-cell1-db-create-2blsh" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.834360 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-fc55-account-create-update-tlw7g"] Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.835572 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-fc55-account-create-update-tlw7g" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.836207 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7vkk\" (UniqueName: \"kubernetes.io/projected/d75b13ee-e8d3-4714-ad14-2b2c9cf993f9-kube-api-access-q7vkk\") pod \"nova-cell1-db-create-2blsh\" (UID: \"d75b13ee-e8d3-4714-ad14-2b2c9cf993f9\") " pod="openstack/nova-cell1-db-create-2blsh" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.838897 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.846583 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-fc55-account-create-update-tlw7g"] Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.864225 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-2blsh" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.908999 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6623940a-3fe5-4bbc-a157-c88d2271609e-operator-scripts\") pod \"nova-cell0-1541-account-create-update-s9fdb\" (UID: \"6623940a-3fe5-4bbc-a157-c88d2271609e\") " pod="openstack/nova-cell0-1541-account-create-update-s9fdb" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.909074 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4g7ln\" (UniqueName: \"kubernetes.io/projected/6623940a-3fe5-4bbc-a157-c88d2271609e-kube-api-access-4g7ln\") pod \"nova-cell0-1541-account-create-update-s9fdb\" (UID: \"6623940a-3fe5-4bbc-a157-c88d2271609e\") " pod="openstack/nova-cell0-1541-account-create-update-s9fdb" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.910224 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6623940a-3fe5-4bbc-a157-c88d2271609e-operator-scripts\") pod \"nova-cell0-1541-account-create-update-s9fdb\" (UID: \"6623940a-3fe5-4bbc-a157-c88d2271609e\") " pod="openstack/nova-cell0-1541-account-create-update-s9fdb" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.935289 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4g7ln\" (UniqueName: \"kubernetes.io/projected/6623940a-3fe5-4bbc-a157-c88d2271609e-kube-api-access-4g7ln\") pod \"nova-cell0-1541-account-create-update-s9fdb\" (UID: \"6623940a-3fe5-4bbc-a157-c88d2271609e\") " pod="openstack/nova-cell0-1541-account-create-update-s9fdb" Dec 05 01:33:28 crc kubenswrapper[4665]: I1205 01:33:28.981890 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-1541-account-create-update-s9fdb" Dec 05 01:33:29 crc kubenswrapper[4665]: I1205 01:33:29.010661 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmd6s\" (UniqueName: \"kubernetes.io/projected/de4c510d-7346-4bf0-8319-ed0473a10044-kube-api-access-lmd6s\") pod \"nova-cell1-fc55-account-create-update-tlw7g\" (UID: \"de4c510d-7346-4bf0-8319-ed0473a10044\") " pod="openstack/nova-cell1-fc55-account-create-update-tlw7g" Dec 05 01:33:29 crc kubenswrapper[4665]: I1205 01:33:29.010753 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de4c510d-7346-4bf0-8319-ed0473a10044-operator-scripts\") pod \"nova-cell1-fc55-account-create-update-tlw7g\" (UID: \"de4c510d-7346-4bf0-8319-ed0473a10044\") " pod="openstack/nova-cell1-fc55-account-create-update-tlw7g" Dec 05 01:33:29 crc kubenswrapper[4665]: I1205 01:33:29.114403 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmd6s\" (UniqueName: \"kubernetes.io/projected/de4c510d-7346-4bf0-8319-ed0473a10044-kube-api-access-lmd6s\") pod \"nova-cell1-fc55-account-create-update-tlw7g\" (UID: \"de4c510d-7346-4bf0-8319-ed0473a10044\") " pod="openstack/nova-cell1-fc55-account-create-update-tlw7g" Dec 05 01:33:29 crc kubenswrapper[4665]: I1205 01:33:29.114464 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de4c510d-7346-4bf0-8319-ed0473a10044-operator-scripts\") pod \"nova-cell1-fc55-account-create-update-tlw7g\" (UID: \"de4c510d-7346-4bf0-8319-ed0473a10044\") " pod="openstack/nova-cell1-fc55-account-create-update-tlw7g" Dec 05 01:33:29 crc kubenswrapper[4665]: I1205 01:33:29.115163 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de4c510d-7346-4bf0-8319-ed0473a10044-operator-scripts\") pod \"nova-cell1-fc55-account-create-update-tlw7g\" (UID: \"de4c510d-7346-4bf0-8319-ed0473a10044\") " pod="openstack/nova-cell1-fc55-account-create-update-tlw7g" Dec 05 01:33:29 crc kubenswrapper[4665]: I1205 01:33:29.131738 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmd6s\" (UniqueName: \"kubernetes.io/projected/de4c510d-7346-4bf0-8319-ed0473a10044-kube-api-access-lmd6s\") pod \"nova-cell1-fc55-account-create-update-tlw7g\" (UID: \"de4c510d-7346-4bf0-8319-ed0473a10044\") " pod="openstack/nova-cell1-fc55-account-create-update-tlw7g" Dec 05 01:33:29 crc kubenswrapper[4665]: I1205 01:33:29.192728 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-fc55-account-create-update-tlw7g" Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.362427 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.456426 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-977b-account-create-update-d9c7b"] Dec 05 01:33:31 crc kubenswrapper[4665]: W1205 01:33:31.457452 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode60d2002_ca42_44ac_ac85_cb8c412a601e.slice/crio-c21c209bf6ad80b2b35f31555698ceeb3103e8516351004972ba5f4b9548aebb WatchSource:0}: Error finding container c21c209bf6ad80b2b35f31555698ceeb3103e8516351004972ba5f4b9548aebb: Status 404 returned error can't find the container with id c21c209bf6ad80b2b35f31555698ceeb3103e8516351004972ba5f4b9548aebb Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.469761 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d94611a-4825-4614-a16f-a206f227201c-scripts\") pod \"7d94611a-4825-4614-a16f-a206f227201c\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.469822 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-44mn2\" (UniqueName: \"kubernetes.io/projected/7d94611a-4825-4614-a16f-a206f227201c-kube-api-access-44mn2\") pod \"7d94611a-4825-4614-a16f-a206f227201c\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.469907 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7d94611a-4825-4614-a16f-a206f227201c-log-httpd\") pod \"7d94611a-4825-4614-a16f-a206f227201c\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.469967 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d94611a-4825-4614-a16f-a206f227201c-config-data\") pod \"7d94611a-4825-4614-a16f-a206f227201c\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.469983 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7d94611a-4825-4614-a16f-a206f227201c-run-httpd\") pod \"7d94611a-4825-4614-a16f-a206f227201c\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.470014 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d94611a-4825-4614-a16f-a206f227201c-combined-ca-bundle\") pod \"7d94611a-4825-4614-a16f-a206f227201c\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.470062 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7d94611a-4825-4614-a16f-a206f227201c-sg-core-conf-yaml\") pod \"7d94611a-4825-4614-a16f-a206f227201c\" (UID: \"7d94611a-4825-4614-a16f-a206f227201c\") " Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.471071 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d94611a-4825-4614-a16f-a206f227201c-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "7d94611a-4825-4614-a16f-a206f227201c" (UID: 
"7d94611a-4825-4614-a16f-a206f227201c"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.471559 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7d94611a-4825-4614-a16f-a206f227201c-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "7d94611a-4825-4614-a16f-a206f227201c" (UID: "7d94611a-4825-4614-a16f-a206f227201c"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.479368 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d94611a-4825-4614-a16f-a206f227201c-scripts" (OuterVolumeSpecName: "scripts") pod "7d94611a-4825-4614-a16f-a206f227201c" (UID: "7d94611a-4825-4614-a16f-a206f227201c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.487659 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d94611a-4825-4614-a16f-a206f227201c-kube-api-access-44mn2" (OuterVolumeSpecName: "kube-api-access-44mn2") pod "7d94611a-4825-4614-a16f-a206f227201c" (UID: "7d94611a-4825-4614-a16f-a206f227201c"). InnerVolumeSpecName "kube-api-access-44mn2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.546897 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-hctnc"] Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.551381 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d94611a-4825-4614-a16f-a206f227201c-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "7d94611a-4825-4614-a16f-a206f227201c" (UID: "7d94611a-4825-4614-a16f-a206f227201c"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:31 crc kubenswrapper[4665]: W1205 01:33:31.553007 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod785a409c_4cf0_4d32_a459_576b739f4b4b.slice/crio-2bc18c23f1fd98c60b324775d895c1734965c926fc802f04c15e67a69aede132 WatchSource:0}: Error finding container 2bc18c23f1fd98c60b324775d895c1734965c926fc802f04c15e67a69aede132: Status 404 returned error can't find the container with id 2bc18c23f1fd98c60b324775d895c1734965c926fc802f04c15e67a69aede132 Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.572181 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7d94611a-4825-4614-a16f-a206f227201c-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.572374 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-44mn2\" (UniqueName: \"kubernetes.io/projected/7d94611a-4825-4614-a16f-a206f227201c-kube-api-access-44mn2\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.572467 4665 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7d94611a-4825-4614-a16f-a206f227201c-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.572522 4665 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/7d94611a-4825-4614-a16f-a206f227201c-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.572573 4665 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/7d94611a-4825-4614-a16f-a206f227201c-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.575552 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-whn5c"] Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.581986 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d94611a-4825-4614-a16f-a206f227201c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7d94611a-4825-4614-a16f-a206f227201c" (UID: "7d94611a-4825-4614-a16f-a206f227201c"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.617606 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-977b-account-create-update-d9c7b" event={"ID":"e60d2002-ca42-44ac-ac85-cb8c412a601e","Type":"ContainerStarted","Data":"c21c209bf6ad80b2b35f31555698ceeb3103e8516351004972ba5f4b9548aebb"} Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.630622 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-hctnc" event={"ID":"42f0aba2-52cb-4679-8fac-bdd74b0f9f82","Type":"ContainerStarted","Data":"4b583b64fcf4e5e779e14927f340e3072703b795673d2620bcc244e38b8d9896"} Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.632644 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-whn5c" event={"ID":"785a409c-4cf0-4d32-a459-576b739f4b4b","Type":"ContainerStarted","Data":"2bc18c23f1fd98c60b324775d895c1734965c926fc802f04c15e67a69aede132"} Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.635992 4665 generic.go:334] "Generic (PLEG): container finished" podID="7d94611a-4825-4614-a16f-a206f227201c" containerID="4253b83787ee6cf2aa5df02f2ace19f5163a5db4a74f768443748deab80aeef4" exitCode=0 Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.636039 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7d94611a-4825-4614-a16f-a206f227201c","Type":"ContainerDied","Data":"4253b83787ee6cf2aa5df02f2ace19f5163a5db4a74f768443748deab80aeef4"} Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.636066 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"7d94611a-4825-4614-a16f-a206f227201c","Type":"ContainerDied","Data":"f36b8314d065e504613ed5b278190bf0c4287604ad5b63c86e47d6ed883d266f"} Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.636088 4665 scope.go:117] "RemoveContainer" containerID="4457c6d5a79f93197f085d4a4c79bb01591fd03c6212a658e7c59d9bd3b52acc" Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.636421 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.673200 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"a8461ae3-f75f-42de-b320-c9dc4b1545ec","Type":"ContainerStarted","Data":"6aa9cd85114f1af9362ad23dc2e503385394eb5157e7e456a904d98d0079f450"} Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.685884 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d94611a-4825-4614-a16f-a206f227201c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.702534 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d94611a-4825-4614-a16f-a206f227201c-config-data" (OuterVolumeSpecName: "config-data") pod "7d94611a-4825-4614-a16f-a206f227201c" (UID: "7d94611a-4825-4614-a16f-a206f227201c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.752072 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.186111722 podStartE2EDuration="13.752050432s" podCreationTimestamp="2025-12-05 01:33:18 +0000 UTC" firstStartedPulling="2025-12-05 01:33:19.075956216 +0000 UTC m=+1374.415348515" lastFinishedPulling="2025-12-05 01:33:30.641894916 +0000 UTC m=+1385.981287225" observedRunningTime="2025-12-05 01:33:31.698755095 +0000 UTC m=+1387.038147394" watchObservedRunningTime="2025-12-05 01:33:31.752050432 +0000 UTC m=+1387.091442731" Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.759598 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-2blsh"] Dec 05 01:33:31 crc kubenswrapper[4665]: W1205 01:33:31.772536 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podde4c510d_7346_4bf0_8319_ed0473a10044.slice/crio-69320314599b66216f79c25a32db1fa0e867ad9e85a2525b3088fee2b78b2597 WatchSource:0}: Error finding container 69320314599b66216f79c25a32db1fa0e867ad9e85a2525b3088fee2b78b2597: Status 404 returned error can't find the container with id 69320314599b66216f79c25a32db1fa0e867ad9e85a2525b3088fee2b78b2597 Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.789522 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d94611a-4825-4614-a16f-a206f227201c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.796226 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-1541-account-create-update-s9fdb"] Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.829404 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-fc55-account-create-update-tlw7g"] Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.895368 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5f4f69cd4c-jp87r"] Dec 05 01:33:31 crc kubenswrapper[4665]: I1205 01:33:31.921286 4665 scope.go:117] "RemoveContainer" containerID="896fb5780a68d84fbb725d68bd752d3285994fc2f9ce0db24015809f860863da" Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.014954 4665 scope.go:117] "RemoveContainer" containerID="4253b83787ee6cf2aa5df02f2ace19f5163a5db4a74f768443748deab80aeef4" Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.085012 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.103047 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.115095 4665 scope.go:117] "RemoveContainer" containerID="6d770426408c843cb273489cd8ca1dd8c5008e14fdd24275653525c976732f62" Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.131174 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:33:32 crc kubenswrapper[4665]: E1205 01:33:32.131663 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d94611a-4825-4614-a16f-a206f227201c" containerName="proxy-httpd" Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.131686 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d94611a-4825-4614-a16f-a206f227201c" containerName="proxy-httpd" Dec 05 01:33:32 crc 
kubenswrapper[4665]: E1205 01:33:32.131700 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d94611a-4825-4614-a16f-a206f227201c" containerName="ceilometer-notification-agent"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.131706 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d94611a-4825-4614-a16f-a206f227201c" containerName="ceilometer-notification-agent"
Dec 05 01:33:32 crc kubenswrapper[4665]: E1205 01:33:32.131731 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d94611a-4825-4614-a16f-a206f227201c" containerName="sg-core"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.131737 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d94611a-4825-4614-a16f-a206f227201c" containerName="sg-core"
Dec 05 01:33:32 crc kubenswrapper[4665]: E1205 01:33:32.132662 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d94611a-4825-4614-a16f-a206f227201c" containerName="ceilometer-central-agent"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.132678 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d94611a-4825-4614-a16f-a206f227201c" containerName="ceilometer-central-agent"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.132874 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d94611a-4825-4614-a16f-a206f227201c" containerName="proxy-httpd"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.132890 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d94611a-4825-4614-a16f-a206f227201c" containerName="ceilometer-notification-agent"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.132900 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d94611a-4825-4614-a16f-a206f227201c" containerName="sg-core"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.132923 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d94611a-4825-4614-a16f-a206f227201c" containerName="ceilometer-central-agent"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.134988 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.138639 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.138854 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.141024 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.212204 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0471ec06-ae39-4f60-923a-6d6d4635dbe1-config-data\") pod \"ceilometer-0\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " pod="openstack/ceilometer-0"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.212265 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t82qw\" (UniqueName: \"kubernetes.io/projected/0471ec06-ae39-4f60-923a-6d6d4635dbe1-kube-api-access-t82qw\") pod \"ceilometer-0\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " pod="openstack/ceilometer-0"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.212307 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0471ec06-ae39-4f60-923a-6d6d4635dbe1-scripts\") pod \"ceilometer-0\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " pod="openstack/ceilometer-0"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.212366 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0471ec06-ae39-4f60-923a-6d6d4635dbe1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " pod="openstack/ceilometer-0"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.212381 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0471ec06-ae39-4f60-923a-6d6d4635dbe1-run-httpd\") pod \"ceilometer-0\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " pod="openstack/ceilometer-0"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.212402 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0471ec06-ae39-4f60-923a-6d6d4635dbe1-log-httpd\") pod \"ceilometer-0\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " pod="openstack/ceilometer-0"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.212455 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0471ec06-ae39-4f60-923a-6d6d4635dbe1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " pod="openstack/ceilometer-0"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.238133 4665 scope.go:117] "RemoveContainer" containerID="4457c6d5a79f93197f085d4a4c79bb01591fd03c6212a658e7c59d9bd3b52acc"
Dec 05 01:33:32 crc kubenswrapper[4665]: E1205 01:33:32.250555 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4457c6d5a79f93197f085d4a4c79bb01591fd03c6212a658e7c59d9bd3b52acc\": container with ID starting with 4457c6d5a79f93197f085d4a4c79bb01591fd03c6212a658e7c59d9bd3b52acc not found: ID does not exist" containerID="4457c6d5a79f93197f085d4a4c79bb01591fd03c6212a658e7c59d9bd3b52acc"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.250595 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4457c6d5a79f93197f085d4a4c79bb01591fd03c6212a658e7c59d9bd3b52acc"} err="failed to get container status \"4457c6d5a79f93197f085d4a4c79bb01591fd03c6212a658e7c59d9bd3b52acc\": rpc error: code = NotFound desc = could not find container \"4457c6d5a79f93197f085d4a4c79bb01591fd03c6212a658e7c59d9bd3b52acc\": container with ID starting with 4457c6d5a79f93197f085d4a4c79bb01591fd03c6212a658e7c59d9bd3b52acc not found: ID does not exist"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.250621 4665 scope.go:117] "RemoveContainer" containerID="896fb5780a68d84fbb725d68bd752d3285994fc2f9ce0db24015809f860863da"
Dec 05 01:33:32 crc kubenswrapper[4665]: E1205 01:33:32.254444 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"896fb5780a68d84fbb725d68bd752d3285994fc2f9ce0db24015809f860863da\": container with ID starting with 896fb5780a68d84fbb725d68bd752d3285994fc2f9ce0db24015809f860863da not found: ID does not exist" containerID="896fb5780a68d84fbb725d68bd752d3285994fc2f9ce0db24015809f860863da"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.254477 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"896fb5780a68d84fbb725d68bd752d3285994fc2f9ce0db24015809f860863da"} err="failed to get container status \"896fb5780a68d84fbb725d68bd752d3285994fc2f9ce0db24015809f860863da\": rpc error: code = NotFound desc = could not find container \"896fb5780a68d84fbb725d68bd752d3285994fc2f9ce0db24015809f860863da\": container with ID starting with 896fb5780a68d84fbb725d68bd752d3285994fc2f9ce0db24015809f860863da not found: ID does not exist"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.254498 4665 scope.go:117] "RemoveContainer" containerID="4253b83787ee6cf2aa5df02f2ace19f5163a5db4a74f768443748deab80aeef4"
Dec 05 01:33:32 crc kubenswrapper[4665]: E1205 01:33:32.255059 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4253b83787ee6cf2aa5df02f2ace19f5163a5db4a74f768443748deab80aeef4\": container with ID starting with 4253b83787ee6cf2aa5df02f2ace19f5163a5db4a74f768443748deab80aeef4 not found: ID does not exist" containerID="4253b83787ee6cf2aa5df02f2ace19f5163a5db4a74f768443748deab80aeef4"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.255077 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4253b83787ee6cf2aa5df02f2ace19f5163a5db4a74f768443748deab80aeef4"} err="failed to get container status \"4253b83787ee6cf2aa5df02f2ace19f5163a5db4a74f768443748deab80aeef4\": rpc error: code = NotFound desc = could not find container \"4253b83787ee6cf2aa5df02f2ace19f5163a5db4a74f768443748deab80aeef4\": container with ID starting with 4253b83787ee6cf2aa5df02f2ace19f5163a5db4a74f768443748deab80aeef4 not found: ID does not exist"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.255089 4665 scope.go:117] "RemoveContainer" containerID="6d770426408c843cb273489cd8ca1dd8c5008e14fdd24275653525c976732f62"
Dec 05 01:33:32 crc kubenswrapper[4665]: E1205 01:33:32.256849 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6d770426408c843cb273489cd8ca1dd8c5008e14fdd24275653525c976732f62\": container with ID starting with 6d770426408c843cb273489cd8ca1dd8c5008e14fdd24275653525c976732f62 not found: ID does not exist" containerID="6d770426408c843cb273489cd8ca1dd8c5008e14fdd24275653525c976732f62"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.256873 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6d770426408c843cb273489cd8ca1dd8c5008e14fdd24275653525c976732f62"} err="failed to get container status \"6d770426408c843cb273489cd8ca1dd8c5008e14fdd24275653525c976732f62\": rpc error: code = NotFound desc = could not find container \"6d770426408c843cb273489cd8ca1dd8c5008e14fdd24275653525c976732f62\": container with ID starting with 6d770426408c843cb273489cd8ca1dd8c5008e14fdd24275653525c976732f62 not found: ID does not exist"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.315559 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0471ec06-ae39-4f60-923a-6d6d4635dbe1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " pod="openstack/ceilometer-0"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.315617 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0471ec06-ae39-4f60-923a-6d6d4635dbe1-config-data\") pod \"ceilometer-0\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " pod="openstack/ceilometer-0"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.315651 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t82qw\" (UniqueName: \"kubernetes.io/projected/0471ec06-ae39-4f60-923a-6d6d4635dbe1-kube-api-access-t82qw\") pod \"ceilometer-0\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " pod="openstack/ceilometer-0"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.315676 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0471ec06-ae39-4f60-923a-6d6d4635dbe1-scripts\") pod \"ceilometer-0\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " pod="openstack/ceilometer-0"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.315736 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0471ec06-ae39-4f60-923a-6d6d4635dbe1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " pod="openstack/ceilometer-0"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.315753 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0471ec06-ae39-4f60-923a-6d6d4635dbe1-run-httpd\") pod \"ceilometer-0\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " pod="openstack/ceilometer-0"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.315772 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0471ec06-ae39-4f60-923a-6d6d4635dbe1-log-httpd\") pod \"ceilometer-0\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " pod="openstack/ceilometer-0"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.316200 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0471ec06-ae39-4f60-923a-6d6d4635dbe1-log-httpd\") pod \"ceilometer-0\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " pod="openstack/ceilometer-0"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.318550 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0471ec06-ae39-4f60-923a-6d6d4635dbe1-run-httpd\") pod \"ceilometer-0\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " pod="openstack/ceilometer-0"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.322516 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0471ec06-ae39-4f60-923a-6d6d4635dbe1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " pod="openstack/ceilometer-0"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.326766 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0471ec06-ae39-4f60-923a-6d6d4635dbe1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " pod="openstack/ceilometer-0"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.327098 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0471ec06-ae39-4f60-923a-6d6d4635dbe1-scripts\") pod \"ceilometer-0\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " pod="openstack/ceilometer-0"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.338836 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t82qw\" (UniqueName: \"kubernetes.io/projected/0471ec06-ae39-4f60-923a-6d6d4635dbe1-kube-api-access-t82qw\") pod \"ceilometer-0\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " pod="openstack/ceilometer-0"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.339245 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0471ec06-ae39-4f60-923a-6d6d4635dbe1-config-data\") pod \"ceilometer-0\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " pod="openstack/ceilometer-0"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.532579 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.549427 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.619943 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9vn2\" (UniqueName: \"kubernetes.io/projected/47a3fd04-4de6-4517-b713-0dc980538bb7-kube-api-access-w9vn2\") pod \"47a3fd04-4de6-4517-b713-0dc980538bb7\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") "
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.620049 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47a3fd04-4de6-4517-b713-0dc980538bb7-logs\") pod \"47a3fd04-4de6-4517-b713-0dc980538bb7\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") "
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.620079 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/47a3fd04-4de6-4517-b713-0dc980538bb7-etc-machine-id\") pod \"47a3fd04-4de6-4517-b713-0dc980538bb7\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") "
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.620162 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47a3fd04-4de6-4517-b713-0dc980538bb7-config-data\") pod \"47a3fd04-4de6-4517-b713-0dc980538bb7\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") "
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.620220 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47a3fd04-4de6-4517-b713-0dc980538bb7-scripts\") pod \"47a3fd04-4de6-4517-b713-0dc980538bb7\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") "
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.620274 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/47a3fd04-4de6-4517-b713-0dc980538bb7-config-data-custom\") pod \"47a3fd04-4de6-4517-b713-0dc980538bb7\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") "
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.620325 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47a3fd04-4de6-4517-b713-0dc980538bb7-combined-ca-bundle\") pod \"47a3fd04-4de6-4517-b713-0dc980538bb7\" (UID: \"47a3fd04-4de6-4517-b713-0dc980538bb7\") "
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.621177 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/47a3fd04-4de6-4517-b713-0dc980538bb7-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "47a3fd04-4de6-4517-b713-0dc980538bb7" (UID: "47a3fd04-4de6-4517-b713-0dc980538bb7"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.622167 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/47a3fd04-4de6-4517-b713-0dc980538bb7-logs" (OuterVolumeSpecName: "logs") pod "47a3fd04-4de6-4517-b713-0dc980538bb7" (UID: "47a3fd04-4de6-4517-b713-0dc980538bb7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.627934 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/47a3fd04-4de6-4517-b713-0dc980538bb7-kube-api-access-w9vn2" (OuterVolumeSpecName: "kube-api-access-w9vn2") pod "47a3fd04-4de6-4517-b713-0dc980538bb7" (UID: "47a3fd04-4de6-4517-b713-0dc980538bb7"). InnerVolumeSpecName "kube-api-access-w9vn2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.641705 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47a3fd04-4de6-4517-b713-0dc980538bb7-scripts" (OuterVolumeSpecName: "scripts") pod "47a3fd04-4de6-4517-b713-0dc980538bb7" (UID: "47a3fd04-4de6-4517-b713-0dc980538bb7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.653745 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47a3fd04-4de6-4517-b713-0dc980538bb7-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "47a3fd04-4de6-4517-b713-0dc980538bb7" (UID: "47a3fd04-4de6-4517-b713-0dc980538bb7"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.723998 4665 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/47a3fd04-4de6-4517-b713-0dc980538bb7-config-data-custom\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.724279 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9vn2\" (UniqueName: \"kubernetes.io/projected/47a3fd04-4de6-4517-b713-0dc980538bb7-kube-api-access-w9vn2\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.724305 4665 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/47a3fd04-4de6-4517-b713-0dc980538bb7-logs\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.724314 4665 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/47a3fd04-4de6-4517-b713-0dc980538bb7-etc-machine-id\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.724324 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/47a3fd04-4de6-4517-b713-0dc980538bb7-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.781139 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-fc55-account-create-update-tlw7g" event={"ID":"de4c510d-7346-4bf0-8319-ed0473a10044","Type":"ContainerStarted","Data":"8ea7773851d03fe16dcd97c9aa0f4587137c3dda61fe5b51a5285a0b32bc37a2"}
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.781180 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-fc55-account-create-update-tlw7g" event={"ID":"de4c510d-7346-4bf0-8319-ed0473a10044","Type":"ContainerStarted","Data":"69320314599b66216f79c25a32db1fa0e867ad9e85a2525b3088fee2b78b2597"}
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.782360 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47a3fd04-4de6-4517-b713-0dc980538bb7-config-data" (OuterVolumeSpecName: "config-data") pod "47a3fd04-4de6-4517-b713-0dc980538bb7" (UID: "47a3fd04-4de6-4517-b713-0dc980538bb7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.805827 4665 generic.go:334] "Generic (PLEG): container finished" podID="d75b13ee-e8d3-4714-ad14-2b2c9cf993f9" containerID="9473e1fdb2fb5077ebf5ee7581dfe6695959730b26f42fb20affde5620abc60e" exitCode=0
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.805915 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-2blsh" event={"ID":"d75b13ee-e8d3-4714-ad14-2b2c9cf993f9","Type":"ContainerDied","Data":"9473e1fdb2fb5077ebf5ee7581dfe6695959730b26f42fb20affde5620abc60e"}
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.805945 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-2blsh" event={"ID":"d75b13ee-e8d3-4714-ad14-2b2c9cf993f9","Type":"ContainerStarted","Data":"24c46fea24bccd6e67f943b705396893e03d0bc43cf28d6b4c248595b106e530"}
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.813822 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/47a3fd04-4de6-4517-b713-0dc980538bb7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "47a3fd04-4de6-4517-b713-0dc980538bb7" (UID: "47a3fd04-4de6-4517-b713-0dc980538bb7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.825069 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/47a3fd04-4de6-4517-b713-0dc980538bb7-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.825103 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/47a3fd04-4de6-4517-b713-0dc980538bb7-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.858823 4665 generic.go:334] "Generic (PLEG): container finished" podID="42f0aba2-52cb-4679-8fac-bdd74b0f9f82" containerID="dc8a70a5ae1fa6e539e77b472bf1ff6e4c1530c436e82901e3c7ee664b8af2b1" exitCode=0
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.859004 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-hctnc" event={"ID":"42f0aba2-52cb-4679-8fac-bdd74b0f9f82","Type":"ContainerDied","Data":"dc8a70a5ae1fa6e539e77b472bf1ff6e4c1530c436e82901e3c7ee664b8af2b1"}
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.875849 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-fc55-account-create-update-tlw7g" podStartSLOduration=4.875828237 podStartE2EDuration="4.875828237s" podCreationTimestamp="2025-12-05 01:33:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:33:32.850816683 +0000 UTC m=+1388.190208982" watchObservedRunningTime="2025-12-05 01:33:32.875828237 +0000 UTC m=+1388.215220536"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.876416 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5f4f69cd4c-jp87r" event={"ID":"5cf42108-5fbb-4bb1-b941-b2181f99fa5c","Type":"ContainerStarted","Data":"12d2c358d6cc3d8219db99635760e7a0920b2274270c4fff02f7757bcceccff6"}
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.876457 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5f4f69cd4c-jp87r" event={"ID":"5cf42108-5fbb-4bb1-b941-b2181f99fa5c","Type":"ContainerStarted","Data":"c490d3abf876593f1535e047958a6cf06f43b7849d09d1481288e942682f4747"}
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.889780 4665 generic.go:334] "Generic (PLEG): container finished" podID="e60d2002-ca42-44ac-ac85-cb8c412a601e" containerID="1f9eae47a55e4fec4525aadfa8eef6df356e430cea52a1d825d7b6906be3944d" exitCode=0
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.889845 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-977b-account-create-update-d9c7b" event={"ID":"e60d2002-ca42-44ac-ac85-cb8c412a601e","Type":"ContainerDied","Data":"1f9eae47a55e4fec4525aadfa8eef6df356e430cea52a1d825d7b6906be3944d"}
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.891879 4665 generic.go:334] "Generic (PLEG): container finished" podID="64407a72-3fdf-450f-b5c0-913ee74bb437" containerID="0d22edabd62a01b81060d36b7b04935aa12a247013b11f7437c9d25fbca25bbb" exitCode=137
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.891930 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-644f785f4-mslbg" event={"ID":"64407a72-3fdf-450f-b5c0-913ee74bb437","Type":"ContainerDied","Data":"0d22edabd62a01b81060d36b7b04935aa12a247013b11f7437c9d25fbca25bbb"}
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.895110 4665 generic.go:334] "Generic (PLEG): container finished" podID="47a3fd04-4de6-4517-b713-0dc980538bb7" containerID="276be87d32d91137448e5a3c4aa78b74393f79d68971a4021a0ad4fdb894eb40" exitCode=137
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.895174 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.918749 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d94611a-4825-4614-a16f-a206f227201c" path="/var/lib/kubelet/pods/7d94611a-4825-4614-a16f-a206f227201c/volumes"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.919869 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"47a3fd04-4de6-4517-b713-0dc980538bb7","Type":"ContainerDied","Data":"276be87d32d91137448e5a3c4aa78b74393f79d68971a4021a0ad4fdb894eb40"}
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.921356 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"47a3fd04-4de6-4517-b713-0dc980538bb7","Type":"ContainerDied","Data":"3479f2b532b2b588cc85bb8a3884f27a2154e92e0b3f2bcd3f486844bc791fc2"}
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.921442 4665 scope.go:117] "RemoveContainer" containerID="276be87d32d91137448e5a3c4aa78b74393f79d68971a4021a0ad4fdb894eb40"
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.957622 4665 generic.go:334] "Generic (PLEG): container finished" podID="785a409c-4cf0-4d32-a459-576b739f4b4b" containerID="79328700a66ab6a5b2884d3dfa0013bf7e95b9107d75067b5d862cbdda7a6587" exitCode=0
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.957707 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-whn5c" event={"ID":"785a409c-4cf0-4d32-a459-576b739f4b4b","Type":"ContainerDied","Data":"79328700a66ab6a5b2884d3dfa0013bf7e95b9107d75067b5d862cbdda7a6587"}
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.965015 4665 generic.go:334] "Generic (PLEG): container finished" podID="2178a916-adc5-4ff5-8972-30b105320f5f" containerID="5fee73ab6bf34e37ceb73d73cdc466ea8680df6b74400f1cd845b17c4378965f" exitCode=137
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.965154 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-86cd4c9876-glfvx" event={"ID":"2178a916-adc5-4ff5-8972-30b105320f5f","Type":"ContainerDied","Data":"5fee73ab6bf34e37ceb73d73cdc466ea8680df6b74400f1cd845b17c4378965f"}
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.968474 4665 generic.go:334] "Generic (PLEG): container finished" podID="6623940a-3fe5-4bbc-a157-c88d2271609e" containerID="dcde7665c51229857c8d5871a4e63adba3479fb5bb1d317b9d296239aa69cba0" exitCode=0
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.969264 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-1541-account-create-update-s9fdb" event={"ID":"6623940a-3fe5-4bbc-a157-c88d2271609e","Type":"ContainerDied","Data":"dcde7665c51229857c8d5871a4e63adba3479fb5bb1d317b9d296239aa69cba0"}
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.969309 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-1541-account-create-update-s9fdb" event={"ID":"6623940a-3fe5-4bbc-a157-c88d2271609e","Type":"ContainerStarted","Data":"f63b3c5c36d3abc3a8de461ede5c21070628e87f010988e54fa5cd245eb6ff2e"}
Dec 05 01:33:32 crc kubenswrapper[4665]: I1205 01:33:32.979185 4665 scope.go:117] "RemoveContainer" containerID="049102bbb54ab2947365cf2fbc619e2ad284ccdcc0b950dc18f3f12f921c9733"
Dec 05 01:33:33 crc kubenswrapper[4665]: I1205 01:33:33.121805 4665 scope.go:117] "RemoveContainer" containerID="276be87d32d91137448e5a3c4aa78b74393f79d68971a4021a0ad4fdb894eb40"
Dec 05 01:33:33 crc kubenswrapper[4665]: E1205 01:33:33.126479 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"276be87d32d91137448e5a3c4aa78b74393f79d68971a4021a0ad4fdb894eb40\": container with ID starting with 276be87d32d91137448e5a3c4aa78b74393f79d68971a4021a0ad4fdb894eb40 not found: ID does not exist" containerID="276be87d32d91137448e5a3c4aa78b74393f79d68971a4021a0ad4fdb894eb40"
Dec 05 01:33:33 crc kubenswrapper[4665]: I1205 01:33:33.126649 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"276be87d32d91137448e5a3c4aa78b74393f79d68971a4021a0ad4fdb894eb40"} err="failed to get container status \"276be87d32d91137448e5a3c4aa78b74393f79d68971a4021a0ad4fdb894eb40\": rpc error: code = NotFound desc = could not find container \"276be87d32d91137448e5a3c4aa78b74393f79d68971a4021a0ad4fdb894eb40\": container with ID starting with 276be87d32d91137448e5a3c4aa78b74393f79d68971a4021a0ad4fdb894eb40 not found: ID does not exist"
Dec 05 01:33:33 crc kubenswrapper[4665]: I1205 01:33:33.126753 4665 scope.go:117] "RemoveContainer" containerID="049102bbb54ab2947365cf2fbc619e2ad284ccdcc0b950dc18f3f12f921c9733"
Dec 05 01:33:33 crc kubenswrapper[4665]: E1205 01:33:33.132454 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"049102bbb54ab2947365cf2fbc619e2ad284ccdcc0b950dc18f3f12f921c9733\": container with ID starting with 049102bbb54ab2947365cf2fbc619e2ad284ccdcc0b950dc18f3f12f921c9733 not found: ID does not exist" containerID="049102bbb54ab2947365cf2fbc619e2ad284ccdcc0b950dc18f3f12f921c9733"
Dec 05 01:33:33 crc kubenswrapper[4665]: I1205 01:33:33.132520 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"049102bbb54ab2947365cf2fbc619e2ad284ccdcc0b950dc18f3f12f921c9733"} err="failed to get container status \"049102bbb54ab2947365cf2fbc619e2ad284ccdcc0b950dc18f3f12f921c9733\": rpc error: code = NotFound desc = could not find container \"049102bbb54ab2947365cf2fbc619e2ad284ccdcc0b950dc18f3f12f921c9733\": container with ID starting with 049102bbb54ab2947365cf2fbc619e2ad284ccdcc0b950dc18f3f12f921c9733 not found: ID does not exist"
Dec 05 01:33:33 crc kubenswrapper[4665]: I1205 01:33:33.160917 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"]
Dec 05 01:33:33 crc kubenswrapper[4665]: I1205 01:33:33.161148 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="aba4deca-cc3e-4589-9f3a-f7149cf107c7" containerName="glance-log" containerID="cri-o://7952c4e41eb83c5e3a3313ab9b0feb795cd9423b3d6400142bfe223c9cf8ec8e" gracePeriod=30
Dec 05 01:33:33 crc kubenswrapper[4665]: I1205 01:33:33.161637 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="aba4deca-cc3e-4589-9f3a-f7149cf107c7" containerName="glance-httpd" containerID="cri-o://1bb7c51a311878a6a67ddc59dfde14100efee59a574cc3e1f77a41b71041ba2e" gracePeriod=30
Dec 05 01:33:33 crc kubenswrapper[4665]: I1205 01:33:33.476935 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 01:33:33 crc kubenswrapper[4665]: I1205 01:33:33.776907 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 01:33:34 crc kubenswrapper[4665]: I1205 01:33:34.000764 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5f4f69cd4c-jp87r" event={"ID":"5cf42108-5fbb-4bb1-b941-b2181f99fa5c","Type":"ContainerStarted","Data":"44bf7a3177497c59f9579f27e75d079c0f24d4ee7dd09407f61b3664fbf9ac82"}
Dec 05 01:33:34 crc kubenswrapper[4665]: I1205 01:33:34.001109 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5f4f69cd4c-jp87r"
Dec 05 01:33:34 crc kubenswrapper[4665]: I1205 01:33:34.001135 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5f4f69cd4c-jp87r"
Dec 05 01:33:34 crc kubenswrapper[4665]: I1205 01:33:34.005790 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0471ec06-ae39-4f60-923a-6d6d4635dbe1","Type":"ContainerStarted","Data":"05ed3c26b705190a2be5797d0cbb75bcd581ef1cf1f58175dd297d176df75ef6"}
Dec 05 01:33:34 crc kubenswrapper[4665]: I1205 01:33:34.032633 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-5f4f69cd4c-jp87r" podStartSLOduration=9.0326146 podStartE2EDuration="9.0326146s" podCreationTimestamp="2025-12-05 01:33:25 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:33:34.028603633 +0000 UTC m=+1389.367995932" watchObservedRunningTime="2025-12-05 01:33:34.0326146 +0000 UTC m=+1389.372006899"
Dec 05 01:33:34 crc kubenswrapper[4665]: I1205 01:33:34.034433 4665 generic.go:334] "Generic (PLEG): container finished" podID="aba4deca-cc3e-4589-9f3a-f7149cf107c7" containerID="7952c4e41eb83c5e3a3313ab9b0feb795cd9423b3d6400142bfe223c9cf8ec8e" exitCode=143
Dec 05 01:33:34 crc kubenswrapper[4665]: I1205 01:33:34.034533 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"aba4deca-cc3e-4589-9f3a-f7149cf107c7","Type":"ContainerDied","Data":"7952c4e41eb83c5e3a3313ab9b0feb795cd9423b3d6400142bfe223c9cf8ec8e"}
Dec 05 01:33:34 crc kubenswrapper[4665]: I1205 01:33:34.043956 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-86cd4c9876-glfvx" event={"ID":"2178a916-adc5-4ff5-8972-30b105320f5f","Type":"ContainerStarted","Data":"dffd3d080b4583df799e6005bb5d553f12161985481db6c5307b59ee304b0ada"}
Dec 05 01:33:34 crc kubenswrapper[4665]: I1205 01:33:34.078483 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-fc55-account-create-update-tlw7g" event={"ID":"de4c510d-7346-4bf0-8319-ed0473a10044","Type":"ContainerDied","Data":"8ea7773851d03fe16dcd97c9aa0f4587137c3dda61fe5b51a5285a0b32bc37a2"}
Dec 05 01:33:34 crc kubenswrapper[4665]: I1205 01:33:34.078469 4665 generic.go:334] "Generic (PLEG): container finished" podID="de4c510d-7346-4bf0-8319-ed0473a10044" containerID="8ea7773851d03fe16dcd97c9aa0f4587137c3dda61fe5b51a5285a0b32bc37a2" exitCode=0
Dec 05 01:33:34 crc kubenswrapper[4665]: I1205 01:33:34.107391 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-644f785f4-mslbg" event={"ID":"64407a72-3fdf-450f-b5c0-913ee74bb437","Type":"ContainerStarted","Data":"2c83b5f87cf431603b663fc4242b5aa849e161905b4c00e3d5993afa578ab38e"}
Dec 05 01:33:34 crc kubenswrapper[4665]: I1205 01:33:34.526564 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"]
Dec 05 01:33:34 crc kubenswrapper[4665]: I1205 01:33:34.527119 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="f7b0e53d-dba2-440d-844e-dd4ca34f1895" containerName="glance-log" containerID="cri-o://9fc8786212208b83cbcc0907c0b995c9bc5120c0f00dbf8af527933b72c60446" gracePeriod=30
Dec 05 01:33:34 crc kubenswrapper[4665]: I1205 01:33:34.527471 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="f7b0e53d-dba2-440d-844e-dd4ca34f1895" containerName="glance-httpd" containerID="cri-o://c5f3896195bb34b2a0ba50833c289f7f2245667ea8c0b75853e9bbfdd54f163f" gracePeriod=30
Dec 05 01:33:34 crc kubenswrapper[4665]: I1205 01:33:34.653587 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-whn5c"
Dec 05 01:33:34 crc kubenswrapper[4665]: I1205 01:33:34.779038 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9ffr6\" (UniqueName: \"kubernetes.io/projected/785a409c-4cf0-4d32-a459-576b739f4b4b-kube-api-access-9ffr6\") pod \"785a409c-4cf0-4d32-a459-576b739f4b4b\" (UID: \"785a409c-4cf0-4d32-a459-576b739f4b4b\") "
Dec 05 01:33:34 crc kubenswrapper[4665]: I1205 01:33:34.780253 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/785a409c-4cf0-4d32-a459-576b739f4b4b-operator-scripts\") pod \"785a409c-4cf0-4d32-a459-576b739f4b4b\" (UID: \"785a409c-4cf0-4d32-a459-576b739f4b4b\") "
Dec 05 01:33:34 crc kubenswrapper[4665]: I1205 01:33:34.781444 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/785a409c-4cf0-4d32-a459-576b739f4b4b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "785a409c-4cf0-4d32-a459-576b739f4b4b" (UID: "785a409c-4cf0-4d32-a459-576b739f4b4b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:33:34 crc kubenswrapper[4665]: I1205 01:33:34.800337 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/785a409c-4cf0-4d32-a459-576b739f4b4b-kube-api-access-9ffr6" (OuterVolumeSpecName: "kube-api-access-9ffr6") pod "785a409c-4cf0-4d32-a459-576b739f4b4b" (UID: "785a409c-4cf0-4d32-a459-576b739f4b4b"). InnerVolumeSpecName "kube-api-access-9ffr6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:33:34 crc kubenswrapper[4665]: I1205 01:33:34.887012 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9ffr6\" (UniqueName: \"kubernetes.io/projected/785a409c-4cf0-4d32-a459-576b739f4b4b-kube-api-access-9ffr6\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:34 crc kubenswrapper[4665]: I1205 01:33:34.887282 4665 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/785a409c-4cf0-4d32-a459-576b739f4b4b-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:34 crc kubenswrapper[4665]: I1205 01:33:34.989624 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-hctnc"
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.016579 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-2blsh"
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.025484 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-1541-account-create-update-s9fdb"
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.056068 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-977b-account-create-update-d9c7b"
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.091073 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s42bb\" (UniqueName: \"kubernetes.io/projected/42f0aba2-52cb-4679-8fac-bdd74b0f9f82-kube-api-access-s42bb\") pod \"42f0aba2-52cb-4679-8fac-bdd74b0f9f82\" (UID: \"42f0aba2-52cb-4679-8fac-bdd74b0f9f82\") "
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.098470 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/42f0aba2-52cb-4679-8fac-bdd74b0f9f82-kube-api-access-s42bb" (OuterVolumeSpecName: "kube-api-access-s42bb") pod "42f0aba2-52cb-4679-8fac-bdd74b0f9f82" (UID: "42f0aba2-52cb-4679-8fac-bdd74b0f9f82"). InnerVolumeSpecName "kube-api-access-s42bb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.128078 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0471ec06-ae39-4f60-923a-6d6d4635dbe1","Type":"ContainerStarted","Data":"55fee502257aeffa50ad3d6d72b95355576bb32f4163de8406e57cd76e44b47a"}
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.141881 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-1541-account-create-update-s9fdb" event={"ID":"6623940a-3fe5-4bbc-a157-c88d2271609e","Type":"ContainerDied","Data":"f63b3c5c36d3abc3a8de461ede5c21070628e87f010988e54fa5cd245eb6ff2e"}
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.141925 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f63b3c5c36d3abc3a8de461ede5c21070628e87f010988e54fa5cd245eb6ff2e"
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.142000 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-1541-account-create-update-s9fdb"
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.144513 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-977b-account-create-update-d9c7b" event={"ID":"e60d2002-ca42-44ac-ac85-cb8c412a601e","Type":"ContainerDied","Data":"c21c209bf6ad80b2b35f31555698ceeb3103e8516351004972ba5f4b9548aebb"}
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.144543 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c21c209bf6ad80b2b35f31555698ceeb3103e8516351004972ba5f4b9548aebb"
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.144592 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-977b-account-create-update-d9c7b"
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.146432 4665 generic.go:334] "Generic (PLEG): container finished" podID="f7b0e53d-dba2-440d-844e-dd4ca34f1895" containerID="9fc8786212208b83cbcc0907c0b995c9bc5120c0f00dbf8af527933b72c60446" exitCode=143
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.146483 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f7b0e53d-dba2-440d-844e-dd4ca34f1895","Type":"ContainerDied","Data":"9fc8786212208b83cbcc0907c0b995c9bc5120c0f00dbf8af527933b72c60446"}
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.147926 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-hctnc" event={"ID":"42f0aba2-52cb-4679-8fac-bdd74b0f9f82","Type":"ContainerDied","Data":"4b583b64fcf4e5e779e14927f340e3072703b795673d2620bcc244e38b8d9896"}
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.147947 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4b583b64fcf4e5e779e14927f340e3072703b795673d2620bcc244e38b8d9896"
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.147995 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-hctnc"
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.149145 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-2blsh" event={"ID":"d75b13ee-e8d3-4714-ad14-2b2c9cf993f9","Type":"ContainerDied","Data":"24c46fea24bccd6e67f943b705396893e03d0bc43cf28d6b4c248595b106e530"}
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.149161 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="24c46fea24bccd6e67f943b705396893e03d0bc43cf28d6b4c248595b106e530"
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.149202 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-2blsh"
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.150413 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-whn5c"
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.150443 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-whn5c" event={"ID":"785a409c-4cf0-4d32-a459-576b739f4b4b","Type":"ContainerDied","Data":"2bc18c23f1fd98c60b324775d895c1734965c926fc802f04c15e67a69aede132"}
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.150476 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2bc18c23f1fd98c60b324775d895c1734965c926fc802f04c15e67a69aede132"
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.192504 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/42f0aba2-52cb-4679-8fac-bdd74b0f9f82-operator-scripts\") pod \"42f0aba2-52cb-4679-8fac-bdd74b0f9f82\" (UID: \"42f0aba2-52cb-4679-8fac-bdd74b0f9f82\") "
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.192603 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q7vkk\" (UniqueName: \"kubernetes.io/projected/d75b13ee-e8d3-4714-ad14-2b2c9cf993f9-kube-api-access-q7vkk\") pod \"d75b13ee-e8d3-4714-ad14-2b2c9cf993f9\" (UID: \"d75b13ee-e8d3-4714-ad14-2b2c9cf993f9\") "
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.192636 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q44xw\" (UniqueName: \"kubernetes.io/projected/e60d2002-ca42-44ac-ac85-cb8c412a601e-kube-api-access-q44xw\") pod \"e60d2002-ca42-44ac-ac85-cb8c412a601e\" (UID: \"e60d2002-ca42-44ac-ac85-cb8c412a601e\") "
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.192653 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e60d2002-ca42-44ac-ac85-cb8c412a601e-operator-scripts\") pod \"e60d2002-ca42-44ac-ac85-cb8c412a601e\" (UID: \"e60d2002-ca42-44ac-ac85-cb8c412a601e\") "
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.192669 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4g7ln\" (UniqueName: \"kubernetes.io/projected/6623940a-3fe5-4bbc-a157-c88d2271609e-kube-api-access-4g7ln\") pod \"6623940a-3fe5-4bbc-a157-c88d2271609e\" (UID: \"6623940a-3fe5-4bbc-a157-c88d2271609e\") "
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.192685 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6623940a-3fe5-4bbc-a157-c88d2271609e-operator-scripts\") pod \"6623940a-3fe5-4bbc-a157-c88d2271609e\" (UID: \"6623940a-3fe5-4bbc-a157-c88d2271609e\") "
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.192757 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d75b13ee-e8d3-4714-ad14-2b2c9cf993f9-operator-scripts\") pod \"d75b13ee-e8d3-4714-ad14-2b2c9cf993f9\" (UID: \"d75b13ee-e8d3-4714-ad14-2b2c9cf993f9\") "
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.193045 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/42f0aba2-52cb-4679-8fac-bdd74b0f9f82-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "42f0aba2-52cb-4679-8fac-bdd74b0f9f82" (UID: "42f0aba2-52cb-4679-8fac-bdd74b0f9f82"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.193119 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s42bb\" (UniqueName: \"kubernetes.io/projected/42f0aba2-52cb-4679-8fac-bdd74b0f9f82-kube-api-access-s42bb\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.193137 4665 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/42f0aba2-52cb-4679-8fac-bdd74b0f9f82-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.196697 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d75b13ee-e8d3-4714-ad14-2b2c9cf993f9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d75b13ee-e8d3-4714-ad14-2b2c9cf993f9" (UID: "d75b13ee-e8d3-4714-ad14-2b2c9cf993f9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.197533 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6623940a-3fe5-4bbc-a157-c88d2271609e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "6623940a-3fe5-4bbc-a157-c88d2271609e" (UID: "6623940a-3fe5-4bbc-a157-c88d2271609e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.197644 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d75b13ee-e8d3-4714-ad14-2b2c9cf993f9-kube-api-access-q7vkk" (OuterVolumeSpecName: "kube-api-access-q7vkk") pod "d75b13ee-e8d3-4714-ad14-2b2c9cf993f9" (UID: "d75b13ee-e8d3-4714-ad14-2b2c9cf993f9"). InnerVolumeSpecName "kube-api-access-q7vkk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.198536 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e60d2002-ca42-44ac-ac85-cb8c412a601e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e60d2002-ca42-44ac-ac85-cb8c412a601e" (UID: "e60d2002-ca42-44ac-ac85-cb8c412a601e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.198942 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e60d2002-ca42-44ac-ac85-cb8c412a601e-kube-api-access-q44xw" (OuterVolumeSpecName: "kube-api-access-q44xw") pod "e60d2002-ca42-44ac-ac85-cb8c412a601e" (UID: "e60d2002-ca42-44ac-ac85-cb8c412a601e"). InnerVolumeSpecName "kube-api-access-q44xw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.205675 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6623940a-3fe5-4bbc-a157-c88d2271609e-kube-api-access-4g7ln" (OuterVolumeSpecName: "kube-api-access-4g7ln") pod "6623940a-3fe5-4bbc-a157-c88d2271609e" (UID: "6623940a-3fe5-4bbc-a157-c88d2271609e"). InnerVolumeSpecName "kube-api-access-4g7ln". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.295444 4665 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d75b13ee-e8d3-4714-ad14-2b2c9cf993f9-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.295775 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q7vkk\" (UniqueName: \"kubernetes.io/projected/d75b13ee-e8d3-4714-ad14-2b2c9cf993f9-kube-api-access-q7vkk\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.295788 4665 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e60d2002-ca42-44ac-ac85-cb8c412a601e-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.295797 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q44xw\" (UniqueName: \"kubernetes.io/projected/e60d2002-ca42-44ac-ac85-cb8c412a601e-kube-api-access-q44xw\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.295806 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4g7ln\" (UniqueName: \"kubernetes.io/projected/6623940a-3fe5-4bbc-a157-c88d2271609e-kube-api-access-4g7ln\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.295815 4665 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/6623940a-3fe5-4bbc-a157-c88d2271609e-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.542767 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-fc55-account-create-update-tlw7g"
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.705467 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lmd6s\" (UniqueName: \"kubernetes.io/projected/de4c510d-7346-4bf0-8319-ed0473a10044-kube-api-access-lmd6s\") pod \"de4c510d-7346-4bf0-8319-ed0473a10044\" (UID: \"de4c510d-7346-4bf0-8319-ed0473a10044\") "
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.705570 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de4c510d-7346-4bf0-8319-ed0473a10044-operator-scripts\") pod \"de4c510d-7346-4bf0-8319-ed0473a10044\" (UID: \"de4c510d-7346-4bf0-8319-ed0473a10044\") "
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.706378 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/de4c510d-7346-4bf0-8319-ed0473a10044-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "de4c510d-7346-4bf0-8319-ed0473a10044" (UID: "de4c510d-7346-4bf0-8319-ed0473a10044"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.728640 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de4c510d-7346-4bf0-8319-ed0473a10044-kube-api-access-lmd6s" (OuterVolumeSpecName: "kube-api-access-lmd6s") pod "de4c510d-7346-4bf0-8319-ed0473a10044" (UID: "de4c510d-7346-4bf0-8319-ed0473a10044"). InnerVolumeSpecName "kube-api-access-lmd6s". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.818367 4665 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/de4c510d-7346-4bf0-8319-ed0473a10044-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:35 crc kubenswrapper[4665]: I1205 01:33:35.818392 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lmd6s\" (UniqueName: \"kubernetes.io/projected/de4c510d-7346-4bf0-8319-ed0473a10044-kube-api-access-lmd6s\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:36 crc kubenswrapper[4665]: I1205 01:33:36.192058 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-fc55-account-create-update-tlw7g" event={"ID":"de4c510d-7346-4bf0-8319-ed0473a10044","Type":"ContainerDied","Data":"69320314599b66216f79c25a32db1fa0e867ad9e85a2525b3088fee2b78b2597"}
Dec 05 01:33:36 crc kubenswrapper[4665]: I1205 01:33:36.192100 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="69320314599b66216f79c25a32db1fa0e867ad9e85a2525b3088fee2b78b2597"
Dec 05 01:33:36 crc kubenswrapper[4665]: I1205 01:33:36.192174 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-fc55-account-create-update-tlw7g"
Dec 05 01:33:36 crc kubenswrapper[4665]: I1205 01:33:36.198051 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0471ec06-ae39-4f60-923a-6d6d4635dbe1","Type":"ContainerStarted","Data":"baae0fb71e4a447ead02522b8a8afb7d81c58209a6198b31499cdbd58790096b"}
Dec 05 01:33:36 crc kubenswrapper[4665]: I1205 01:33:36.198430 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0471ec06-ae39-4f60-923a-6d6d4635dbe1","Type":"ContainerStarted","Data":"655450280d3ce5bee0f233147ea3ee5053017f29c63c86275db3b4ea81d3b514"}
Dec 05 01:33:36 crc kubenswrapper[4665]: I1205 01:33:36.990371 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.157116 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aba4deca-cc3e-4589-9f3a-f7149cf107c7-public-tls-certs\") pod \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") "
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.157203 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aba4deca-cc3e-4589-9f3a-f7149cf107c7-logs\") pod \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") "
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.157247 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vjxwc\" (UniqueName: \"kubernetes.io/projected/aba4deca-cc3e-4589-9f3a-f7149cf107c7-kube-api-access-vjxwc\") pod \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") "
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.157275 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/aba4deca-cc3e-4589-9f3a-f7149cf107c7-httpd-run\") pod \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") "
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.157383 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aba4deca-cc3e-4589-9f3a-f7149cf107c7-scripts\") pod \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") "
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.157410 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") "
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.157500 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aba4deca-cc3e-4589-9f3a-f7149cf107c7-combined-ca-bundle\") pod \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") "
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.157541 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aba4deca-cc3e-4589-9f3a-f7149cf107c7-config-data\") pod \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\" (UID: \"aba4deca-cc3e-4589-9f3a-f7149cf107c7\") "
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.158558 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aba4deca-cc3e-4589-9f3a-f7149cf107c7-logs" (OuterVolumeSpecName: "logs") pod "aba4deca-cc3e-4589-9f3a-f7149cf107c7" (UID: "aba4deca-cc3e-4589-9f3a-f7149cf107c7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.161387 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aba4deca-cc3e-4589-9f3a-f7149cf107c7-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "aba4deca-cc3e-4589-9f3a-f7149cf107c7" (UID: "aba4deca-cc3e-4589-9f3a-f7149cf107c7"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.166496 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "glance") pod "aba4deca-cc3e-4589-9f3a-f7149cf107c7" (UID: "aba4deca-cc3e-4589-9f3a-f7149cf107c7"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.168709 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aba4deca-cc3e-4589-9f3a-f7149cf107c7-kube-api-access-vjxwc" (OuterVolumeSpecName: "kube-api-access-vjxwc") pod "aba4deca-cc3e-4589-9f3a-f7149cf107c7" (UID: "aba4deca-cc3e-4589-9f3a-f7149cf107c7"). InnerVolumeSpecName "kube-api-access-vjxwc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.180435 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aba4deca-cc3e-4589-9f3a-f7149cf107c7-scripts" (OuterVolumeSpecName: "scripts") pod "aba4deca-cc3e-4589-9f3a-f7149cf107c7" (UID: "aba4deca-cc3e-4589-9f3a-f7149cf107c7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.220834 4665 generic.go:334] "Generic (PLEG): container finished" podID="aba4deca-cc3e-4589-9f3a-f7149cf107c7" containerID="1bb7c51a311878a6a67ddc59dfde14100efee59a574cc3e1f77a41b71041ba2e" exitCode=0
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.220873 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"aba4deca-cc3e-4589-9f3a-f7149cf107c7","Type":"ContainerDied","Data":"1bb7c51a311878a6a67ddc59dfde14100efee59a574cc3e1f77a41b71041ba2e"}
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.220898 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"aba4deca-cc3e-4589-9f3a-f7149cf107c7","Type":"ContainerDied","Data":"d4d3541b87a2f83eaa3a665edaaa839cb0d187ceb12f564ede805c0b2310ece6"}
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.220914 4665 scope.go:117] "RemoveContainer" containerID="1bb7c51a311878a6a67ddc59dfde14100efee59a574cc3e1f77a41b71041ba2e"
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.221038 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0"
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.260415 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aba4deca-cc3e-4589-9f3a-f7149cf107c7-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.260461 4665 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" "
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.260471 4665 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/aba4deca-cc3e-4589-9f3a-f7149cf107c7-logs\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.261096 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vjxwc\" (UniqueName: \"kubernetes.io/projected/aba4deca-cc3e-4589-9f3a-f7149cf107c7-kube-api-access-vjxwc\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.261141 4665 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/aba4deca-cc3e-4589-9f3a-f7149cf107c7-httpd-run\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.271756 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aba4deca-cc3e-4589-9f3a-f7149cf107c7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aba4deca-cc3e-4589-9f3a-f7149cf107c7" (UID: "aba4deca-cc3e-4589-9f3a-f7149cf107c7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.272015 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aba4deca-cc3e-4589-9f3a-f7149cf107c7-config-data" (OuterVolumeSpecName: "config-data") pod "aba4deca-cc3e-4589-9f3a-f7149cf107c7" (UID: "aba4deca-cc3e-4589-9f3a-f7149cf107c7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.309112 4665 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc"
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.318377 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aba4deca-cc3e-4589-9f3a-f7149cf107c7-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "aba4deca-cc3e-4589-9f3a-f7149cf107c7" (UID: "aba4deca-cc3e-4589-9f3a-f7149cf107c7"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.353605 4665 scope.go:117] "RemoveContainer" containerID="7952c4e41eb83c5e3a3313ab9b0feb795cd9423b3d6400142bfe223c9cf8ec8e"
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.362846 4665 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/aba4deca-cc3e-4589-9f3a-f7149cf107c7-public-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.363037 4665 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.363096 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aba4deca-cc3e-4589-9f3a-f7149cf107c7-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.363151 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aba4deca-cc3e-4589-9f3a-f7149cf107c7-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.387189 4665 scope.go:117] "RemoveContainer" containerID="1bb7c51a311878a6a67ddc59dfde14100efee59a574cc3e1f77a41b71041ba2e"
Dec 05 01:33:37 crc kubenswrapper[4665]: E1205 01:33:37.387702 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1bb7c51a311878a6a67ddc59dfde14100efee59a574cc3e1f77a41b71041ba2e\": container with ID starting with 1bb7c51a311878a6a67ddc59dfde14100efee59a574cc3e1f77a41b71041ba2e not found: ID does not exist" containerID="1bb7c51a311878a6a67ddc59dfde14100efee59a574cc3e1f77a41b71041ba2e"
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.387800 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1bb7c51a311878a6a67ddc59dfde14100efee59a574cc3e1f77a41b71041ba2e"} err="failed to get container status \"1bb7c51a311878a6a67ddc59dfde14100efee59a574cc3e1f77a41b71041ba2e\": rpc error: code = NotFound desc = could not find container \"1bb7c51a311878a6a67ddc59dfde14100efee59a574cc3e1f77a41b71041ba2e\": container with ID starting with 1bb7c51a311878a6a67ddc59dfde14100efee59a574cc3e1f77a41b71041ba2e not found: ID does not exist"
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.387878 4665 scope.go:117] "RemoveContainer" containerID="7952c4e41eb83c5e3a3313ab9b0feb795cd9423b3d6400142bfe223c9cf8ec8e"
Dec 05 01:33:37 crc kubenswrapper[4665]: E1205 01:33:37.388121 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7952c4e41eb83c5e3a3313ab9b0feb795cd9423b3d6400142bfe223c9cf8ec8e\": container with ID starting with 7952c4e41eb83c5e3a3313ab9b0feb795cd9423b3d6400142bfe223c9cf8ec8e not found: ID does not exist" containerID="7952c4e41eb83c5e3a3313ab9b0feb795cd9423b3d6400142bfe223c9cf8ec8e"
Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.388201 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7952c4e41eb83c5e3a3313ab9b0feb795cd9423b3d6400142bfe223c9cf8ec8e"} err="failed to get container status \"7952c4e41eb83c5e3a3313ab9b0feb795cd9423b3d6400142bfe223c9cf8ec8e\": rpc error: code = NotFound desc = could not find container
\"7952c4e41eb83c5e3a3313ab9b0feb795cd9423b3d6400142bfe223c9cf8ec8e\": container with ID starting with 7952c4e41eb83c5e3a3313ab9b0feb795cd9423b3d6400142bfe223c9cf8ec8e not found: ID does not exist" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.555332 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.565408 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.593844 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 01:33:37 crc kubenswrapper[4665]: E1205 01:33:37.594190 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="42f0aba2-52cb-4679-8fac-bdd74b0f9f82" containerName="mariadb-database-create" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.594206 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="42f0aba2-52cb-4679-8fac-bdd74b0f9f82" containerName="mariadb-database-create" Dec 05 01:33:37 crc kubenswrapper[4665]: E1205 01:33:37.594221 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47a3fd04-4de6-4517-b713-0dc980538bb7" containerName="cinder-api" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.594228 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="47a3fd04-4de6-4517-b713-0dc980538bb7" containerName="cinder-api" Dec 05 01:33:37 crc kubenswrapper[4665]: E1205 01:33:37.594244 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e60d2002-ca42-44ac-ac85-cb8c412a601e" containerName="mariadb-account-create-update" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.594250 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="e60d2002-ca42-44ac-ac85-cb8c412a601e" containerName="mariadb-account-create-update" Dec 05 01:33:37 crc kubenswrapper[4665]: E1205 01:33:37.594260 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="785a409c-4cf0-4d32-a459-576b739f4b4b" containerName="mariadb-database-create" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.594266 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="785a409c-4cf0-4d32-a459-576b739f4b4b" containerName="mariadb-database-create" Dec 05 01:33:37 crc kubenswrapper[4665]: E1205 01:33:37.594285 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aba4deca-cc3e-4589-9f3a-f7149cf107c7" containerName="glance-log" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.594307 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="aba4deca-cc3e-4589-9f3a-f7149cf107c7" containerName="glance-log" Dec 05 01:33:37 crc kubenswrapper[4665]: E1205 01:33:37.594318 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de4c510d-7346-4bf0-8319-ed0473a10044" containerName="mariadb-account-create-update" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.594324 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="de4c510d-7346-4bf0-8319-ed0473a10044" containerName="mariadb-account-create-update" Dec 05 01:33:37 crc kubenswrapper[4665]: E1205 01:33:37.594337 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aba4deca-cc3e-4589-9f3a-f7149cf107c7" containerName="glance-httpd" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.594343 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="aba4deca-cc3e-4589-9f3a-f7149cf107c7" containerName="glance-httpd" Dec 05 01:33:37 crc 
kubenswrapper[4665]: E1205 01:33:37.594358 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6623940a-3fe5-4bbc-a157-c88d2271609e" containerName="mariadb-account-create-update" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.594366 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="6623940a-3fe5-4bbc-a157-c88d2271609e" containerName="mariadb-account-create-update" Dec 05 01:33:37 crc kubenswrapper[4665]: E1205 01:33:37.594383 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d75b13ee-e8d3-4714-ad14-2b2c9cf993f9" containerName="mariadb-database-create" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.594391 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="d75b13ee-e8d3-4714-ad14-2b2c9cf993f9" containerName="mariadb-database-create" Dec 05 01:33:37 crc kubenswrapper[4665]: E1205 01:33:37.594406 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="47a3fd04-4de6-4517-b713-0dc980538bb7" containerName="cinder-api-log" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.594412 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="47a3fd04-4de6-4517-b713-0dc980538bb7" containerName="cinder-api-log" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.594584 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="aba4deca-cc3e-4589-9f3a-f7149cf107c7" containerName="glance-log" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.594598 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="47a3fd04-4de6-4517-b713-0dc980538bb7" containerName="cinder-api" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.594610 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="42f0aba2-52cb-4679-8fac-bdd74b0f9f82" containerName="mariadb-database-create" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.594618 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="47a3fd04-4de6-4517-b713-0dc980538bb7" containerName="cinder-api-log" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.594625 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="de4c510d-7346-4bf0-8319-ed0473a10044" containerName="mariadb-account-create-update" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.594636 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="aba4deca-cc3e-4589-9f3a-f7149cf107c7" containerName="glance-httpd" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.594643 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="e60d2002-ca42-44ac-ac85-cb8c412a601e" containerName="mariadb-account-create-update" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.594649 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="785a409c-4cf0-4d32-a459-576b739f4b4b" containerName="mariadb-database-create" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.594665 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="6623940a-3fe5-4bbc-a157-c88d2271609e" containerName="mariadb-account-create-update" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.594680 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="d75b13ee-e8d3-4714-ad14-2b2c9cf993f9" containerName="mariadb-database-create" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.596392 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.607824 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.613074 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.621157 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.769149 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9\") " pod="openstack/glance-default-external-api-0" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.769190 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9\") " pod="openstack/glance-default-external-api-0" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.769265 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9-scripts\") pod \"glance-default-external-api-0\" (UID: \"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9\") " pod="openstack/glance-default-external-api-0" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.769311 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9\") " pod="openstack/glance-default-external-api-0" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.769328 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9-config-data\") pod \"glance-default-external-api-0\" (UID: \"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9\") " pod="openstack/glance-default-external-api-0" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.769360 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9-logs\") pod \"glance-default-external-api-0\" (UID: \"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9\") " pod="openstack/glance-default-external-api-0" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.769412 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9\") " pod="openstack/glance-default-external-api-0" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.769459 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-txfrw\" (UniqueName: \"kubernetes.io/projected/75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9-kube-api-access-txfrw\") pod \"glance-default-external-api-0\" (UID: \"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9\") " pod="openstack/glance-default-external-api-0" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.870688 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9\") " pod="openstack/glance-default-external-api-0" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.870941 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9-config-data\") pod \"glance-default-external-api-0\" (UID: \"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9\") " pod="openstack/glance-default-external-api-0" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.871078 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9-logs\") pod \"glance-default-external-api-0\" (UID: \"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9\") " pod="openstack/glance-default-external-api-0" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.871192 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9\") " pod="openstack/glance-default-external-api-0" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.871357 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txfrw\" (UniqueName: \"kubernetes.io/projected/75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9-kube-api-access-txfrw\") pod \"glance-default-external-api-0\" (UID: \"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9\") " pod="openstack/glance-default-external-api-0" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.871588 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9\") " pod="openstack/glance-default-external-api-0" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.871717 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9\") " pod="openstack/glance-default-external-api-0" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.871725 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9-logs\") pod \"glance-default-external-api-0\" (UID: \"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9\") " pod="openstack/glance-default-external-api-0" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.871013 4665 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod 
\"glance-default-external-api-0\" (UID: \"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/glance-default-external-api-0" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.871957 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9\") " pod="openstack/glance-default-external-api-0" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.876629 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9-scripts\") pod \"glance-default-external-api-0\" (UID: \"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9\") " pod="openstack/glance-default-external-api-0" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.885766 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9\") " pod="openstack/glance-default-external-api-0" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.886466 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9\") " pod="openstack/glance-default-external-api-0" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.886980 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9-scripts\") pod \"glance-default-external-api-0\" (UID: \"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9\") " pod="openstack/glance-default-external-api-0" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.887360 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9-config-data\") pod \"glance-default-external-api-0\" (UID: \"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9\") " pod="openstack/glance-default-external-api-0" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.906999 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txfrw\" (UniqueName: \"kubernetes.io/projected/75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9-kube-api-access-txfrw\") pod \"glance-default-external-api-0\" (UID: \"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9\") " pod="openstack/glance-default-external-api-0" Dec 05 01:33:37 crc kubenswrapper[4665]: I1205 01:33:37.911663 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"glance-default-external-api-0\" (UID: \"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9\") " pod="openstack/glance-default-external-api-0" Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.213532 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.252051 4665 generic.go:334] "Generic (PLEG): container finished" podID="f7b0e53d-dba2-440d-844e-dd4ca34f1895" containerID="c5f3896195bb34b2a0ba50833c289f7f2245667ea8c0b75853e9bbfdd54f163f" exitCode=0 Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.252137 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f7b0e53d-dba2-440d-844e-dd4ca34f1895","Type":"ContainerDied","Data":"c5f3896195bb34b2a0ba50833c289f7f2245667ea8c0b75853e9bbfdd54f163f"} Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.566051 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.699603 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7b0e53d-dba2-440d-844e-dd4ca34f1895-scripts\") pod \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.699650 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7b0e53d-dba2-440d-844e-dd4ca34f1895-internal-tls-certs\") pod \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.699728 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f7b0e53d-dba2-440d-844e-dd4ca34f1895-httpd-run\") pod \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.699744 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7b0e53d-dba2-440d-844e-dd4ca34f1895-config-data\") pod \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.699786 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7b0e53d-dba2-440d-844e-dd4ca34f1895-combined-ca-bundle\") pod \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.699842 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7b0e53d-dba2-440d-844e-dd4ca34f1895-logs\") pod \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.699880 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sq22f\" (UniqueName: \"kubernetes.io/projected/f7b0e53d-dba2-440d-844e-dd4ca34f1895-kube-api-access-sq22f\") pod \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\" (UID: \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.699907 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"f7b0e53d-dba2-440d-844e-dd4ca34f1895\" (UID: 
\"f7b0e53d-dba2-440d-844e-dd4ca34f1895\") " Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.701327 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7b0e53d-dba2-440d-844e-dd4ca34f1895-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "f7b0e53d-dba2-440d-844e-dd4ca34f1895" (UID: "f7b0e53d-dba2-440d-844e-dd4ca34f1895"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.701400 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f7b0e53d-dba2-440d-844e-dd4ca34f1895-logs" (OuterVolumeSpecName: "logs") pod "f7b0e53d-dba2-440d-844e-dd4ca34f1895" (UID: "f7b0e53d-dba2-440d-844e-dd4ca34f1895"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.711461 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "f7b0e53d-dba2-440d-844e-dd4ca34f1895" (UID: "f7b0e53d-dba2-440d-844e-dd4ca34f1895"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.711604 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f7b0e53d-dba2-440d-844e-dd4ca34f1895-kube-api-access-sq22f" (OuterVolumeSpecName: "kube-api-access-sq22f") pod "f7b0e53d-dba2-440d-844e-dd4ca34f1895" (UID: "f7b0e53d-dba2-440d-844e-dd4ca34f1895"). InnerVolumeSpecName "kube-api-access-sq22f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.718060 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7b0e53d-dba2-440d-844e-dd4ca34f1895-scripts" (OuterVolumeSpecName: "scripts") pod "f7b0e53d-dba2-440d-844e-dd4ca34f1895" (UID: "f7b0e53d-dba2-440d-844e-dd4ca34f1895"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.764137 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7b0e53d-dba2-440d-844e-dd4ca34f1895-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f7b0e53d-dba2-440d-844e-dd4ca34f1895" (UID: "f7b0e53d-dba2-440d-844e-dd4ca34f1895"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.794528 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7b0e53d-dba2-440d-844e-dd4ca34f1895-config-data" (OuterVolumeSpecName: "config-data") pod "f7b0e53d-dba2-440d-844e-dd4ca34f1895" (UID: "f7b0e53d-dba2-440d-844e-dd4ca34f1895"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.801507 4665 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.801535 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/f7b0e53d-dba2-440d-844e-dd4ca34f1895-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.801544 4665 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/f7b0e53d-dba2-440d-844e-dd4ca34f1895-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.801553 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f7b0e53d-dba2-440d-844e-dd4ca34f1895-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.801561 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f7b0e53d-dba2-440d-844e-dd4ca34f1895-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.801571 4665 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f7b0e53d-dba2-440d-844e-dd4ca34f1895-logs\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.801579 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sq22f\" (UniqueName: \"kubernetes.io/projected/f7b0e53d-dba2-440d-844e-dd4ca34f1895-kube-api-access-sq22f\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.802024 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f7b0e53d-dba2-440d-844e-dd4ca34f1895-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "f7b0e53d-dba2-440d-844e-dd4ca34f1895" (UID: "f7b0e53d-dba2-440d-844e-dd4ca34f1895"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.827398 4665 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.910664 4665 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.911017 4665 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/f7b0e53d-dba2-440d-844e-dd4ca34f1895-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:38 crc kubenswrapper[4665]: I1205 01:33:38.940157 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aba4deca-cc3e-4589-9f3a-f7149cf107c7" path="/var/lib/kubelet/pods/aba4deca-cc3e-4589-9f3a-f7149cf107c7/volumes" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.052810 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.082551 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-cclzq"] Dec 05 01:33:39 crc kubenswrapper[4665]: E1205 01:33:39.082929 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7b0e53d-dba2-440d-844e-dd4ca34f1895" containerName="glance-log" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.082943 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7b0e53d-dba2-440d-844e-dd4ca34f1895" containerName="glance-log" Dec 05 01:33:39 crc kubenswrapper[4665]: E1205 01:33:39.082971 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f7b0e53d-dba2-440d-844e-dd4ca34f1895" containerName="glance-httpd" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.082978 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="f7b0e53d-dba2-440d-844e-dd4ca34f1895" containerName="glance-httpd" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.083147 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7b0e53d-dba2-440d-844e-dd4ca34f1895" containerName="glance-log" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.083164 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="f7b0e53d-dba2-440d-844e-dd4ca34f1895" containerName="glance-httpd" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.083734 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-cclzq" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.106173 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-frk4r" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.108642 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.109336 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.162990 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-cclzq"] Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.227762 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ff4178e-958a-443a-9dc6-dc354c11fa89-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-cclzq\" (UID: \"4ff4178e-958a-443a-9dc6-dc354c11fa89\") " pod="openstack/nova-cell0-conductor-db-sync-cclzq" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.227811 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4ff4178e-958a-443a-9dc6-dc354c11fa89-scripts\") pod \"nova-cell0-conductor-db-sync-cclzq\" (UID: \"4ff4178e-958a-443a-9dc6-dc354c11fa89\") " pod="openstack/nova-cell0-conductor-db-sync-cclzq" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.227862 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m8tzk\" (UniqueName: \"kubernetes.io/projected/4ff4178e-958a-443a-9dc6-dc354c11fa89-kube-api-access-m8tzk\") pod \"nova-cell0-conductor-db-sync-cclzq\" (UID: \"4ff4178e-958a-443a-9dc6-dc354c11fa89\") " pod="openstack/nova-cell0-conductor-db-sync-cclzq" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.227880 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ff4178e-958a-443a-9dc6-dc354c11fa89-config-data\") pod \"nova-cell0-conductor-db-sync-cclzq\" (UID: \"4ff4178e-958a-443a-9dc6-dc354c11fa89\") " pod="openstack/nova-cell0-conductor-db-sync-cclzq" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.289392 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.289602 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"f7b0e53d-dba2-440d-844e-dd4ca34f1895","Type":"ContainerDied","Data":"a8d923f5dfdfd280fafb5de790ad914697f31e701d8e026628b045b2e57021fe"} Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.289688 4665 scope.go:117] "RemoveContainer" containerID="c5f3896195bb34b2a0ba50833c289f7f2245667ea8c0b75853e9bbfdd54f163f" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.293624 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9","Type":"ContainerStarted","Data":"f8251a9d263d72f8e81c264d7fb3f32a870477fe87c3fd08b081b901a4fc669e"} Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.296762 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0471ec06-ae39-4f60-923a-6d6d4635dbe1","Type":"ContainerStarted","Data":"24f8dfbb6e9b9bb8b4d402bf72ca78295df00af189694c290b90a95382f35277"} Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.296905 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0471ec06-ae39-4f60-923a-6d6d4635dbe1" containerName="ceilometer-central-agent" containerID="cri-o://55fee502257aeffa50ad3d6d72b95355576bb32f4163de8406e57cd76e44b47a" gracePeriod=30 Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.296942 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.296990 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0471ec06-ae39-4f60-923a-6d6d4635dbe1" containerName="ceilometer-notification-agent" containerID="cri-o://655450280d3ce5bee0f233147ea3ee5053017f29c63c86275db3b4ea81d3b514" gracePeriod=30 Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.296967 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0471ec06-ae39-4f60-923a-6d6d4635dbe1" containerName="sg-core" containerID="cri-o://baae0fb71e4a447ead02522b8a8afb7d81c58209a6198b31499cdbd58790096b" gracePeriod=30 Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.297063 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="0471ec06-ae39-4f60-923a-6d6d4635dbe1" containerName="proxy-httpd" containerID="cri-o://24f8dfbb6e9b9bb8b4d402bf72ca78295df00af189694c290b90a95382f35277" gracePeriod=30 Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.322844 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.331361 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m8tzk\" (UniqueName: \"kubernetes.io/projected/4ff4178e-958a-443a-9dc6-dc354c11fa89-kube-api-access-m8tzk\") pod \"nova-cell0-conductor-db-sync-cclzq\" (UID: \"4ff4178e-958a-443a-9dc6-dc354c11fa89\") " pod="openstack/nova-cell0-conductor-db-sync-cclzq" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.331451 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/4ff4178e-958a-443a-9dc6-dc354c11fa89-config-data\") pod \"nova-cell0-conductor-db-sync-cclzq\" (UID: \"4ff4178e-958a-443a-9dc6-dc354c11fa89\") " pod="openstack/nova-cell0-conductor-db-sync-cclzq" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.331628 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ff4178e-958a-443a-9dc6-dc354c11fa89-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-cclzq\" (UID: \"4ff4178e-958a-443a-9dc6-dc354c11fa89\") " pod="openstack/nova-cell0-conductor-db-sync-cclzq" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.331653 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4ff4178e-958a-443a-9dc6-dc354c11fa89-scripts\") pod \"nova-cell0-conductor-db-sync-cclzq\" (UID: \"4ff4178e-958a-443a-9dc6-dc354c11fa89\") " pod="openstack/nova-cell0-conductor-db-sync-cclzq" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.332574 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.357130 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4ff4178e-958a-443a-9dc6-dc354c11fa89-scripts\") pod \"nova-cell0-conductor-db-sync-cclzq\" (UID: \"4ff4178e-958a-443a-9dc6-dc354c11fa89\") " pod="openstack/nova-cell0-conductor-db-sync-cclzq" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.357217 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ff4178e-958a-443a-9dc6-dc354c11fa89-config-data\") pod \"nova-cell0-conductor-db-sync-cclzq\" (UID: \"4ff4178e-958a-443a-9dc6-dc354c11fa89\") " pod="openstack/nova-cell0-conductor-db-sync-cclzq" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.360543 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ff4178e-958a-443a-9dc6-dc354c11fa89-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-cclzq\" (UID: \"4ff4178e-958a-443a-9dc6-dc354c11fa89\") " pod="openstack/nova-cell0-conductor-db-sync-cclzq" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.369118 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.651720095 podStartE2EDuration="7.369100585s" podCreationTimestamp="2025-12-05 01:33:32 +0000 UTC" firstStartedPulling="2025-12-05 01:33:33.479472829 +0000 UTC m=+1388.818865128" lastFinishedPulling="2025-12-05 01:33:38.196853319 +0000 UTC m=+1393.536245618" observedRunningTime="2025-12-05 01:33:39.347211895 +0000 UTC m=+1394.686604194" watchObservedRunningTime="2025-12-05 01:33:39.369100585 +0000 UTC m=+1394.708492884" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.369421 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.383468 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.395881 4665 scope.go:117] "RemoveContainer" containerID="9fc8786212208b83cbcc0907c0b995c9bc5120c0f00dbf8af527933b72c60446" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.396561 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.396807 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.414654 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m8tzk\" (UniqueName: \"kubernetes.io/projected/4ff4178e-958a-443a-9dc6-dc354c11fa89-kube-api-access-m8tzk\") pod \"nova-cell0-conductor-db-sync-cclzq\" (UID: \"4ff4178e-958a-443a-9dc6-dc354c11fa89\") " pod="openstack/nova-cell0-conductor-db-sync-cclzq" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.481838 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.485902 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-cclzq" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.542050 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37c48d2d-df08-4684-8215-be918d17cdae-scripts\") pod \"glance-default-internal-api-0\" (UID: \"37c48d2d-df08-4684-8215-be918d17cdae\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.542731 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/37c48d2d-df08-4684-8215-be918d17cdae-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"37c48d2d-df08-4684-8215-be918d17cdae\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.542762 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2nb4\" (UniqueName: \"kubernetes.io/projected/37c48d2d-df08-4684-8215-be918d17cdae-kube-api-access-g2nb4\") pod \"glance-default-internal-api-0\" (UID: \"37c48d2d-df08-4684-8215-be918d17cdae\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.543106 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/37c48d2d-df08-4684-8215-be918d17cdae-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"37c48d2d-df08-4684-8215-be918d17cdae\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.543252 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37c48d2d-df08-4684-8215-be918d17cdae-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"37c48d2d-df08-4684-8215-be918d17cdae\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.543325 4665 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"37c48d2d-df08-4684-8215-be918d17cdae\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.543414 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37c48d2d-df08-4684-8215-be918d17cdae-config-data\") pod \"glance-default-internal-api-0\" (UID: \"37c48d2d-df08-4684-8215-be918d17cdae\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.543470 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/37c48d2d-df08-4684-8215-be918d17cdae-logs\") pod \"glance-default-internal-api-0\" (UID: \"37c48d2d-df08-4684-8215-be918d17cdae\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.644705 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37c48d2d-df08-4684-8215-be918d17cdae-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"37c48d2d-df08-4684-8215-be918d17cdae\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.644786 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"37c48d2d-df08-4684-8215-be918d17cdae\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.644847 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37c48d2d-df08-4684-8215-be918d17cdae-config-data\") pod \"glance-default-internal-api-0\" (UID: \"37c48d2d-df08-4684-8215-be918d17cdae\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.644875 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/37c48d2d-df08-4684-8215-be918d17cdae-logs\") pod \"glance-default-internal-api-0\" (UID: \"37c48d2d-df08-4684-8215-be918d17cdae\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.644991 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37c48d2d-df08-4684-8215-be918d17cdae-scripts\") pod \"glance-default-internal-api-0\" (UID: \"37c48d2d-df08-4684-8215-be918d17cdae\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.645017 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/37c48d2d-df08-4684-8215-be918d17cdae-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"37c48d2d-df08-4684-8215-be918d17cdae\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.645039 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2nb4\" (UniqueName: 
\"kubernetes.io/projected/37c48d2d-df08-4684-8215-be918d17cdae-kube-api-access-g2nb4\") pod \"glance-default-internal-api-0\" (UID: \"37c48d2d-df08-4684-8215-be918d17cdae\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.645074 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/37c48d2d-df08-4684-8215-be918d17cdae-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"37c48d2d-df08-4684-8215-be918d17cdae\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.647857 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/37c48d2d-df08-4684-8215-be918d17cdae-logs\") pod \"glance-default-internal-api-0\" (UID: \"37c48d2d-df08-4684-8215-be918d17cdae\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.648323 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/37c48d2d-df08-4684-8215-be918d17cdae-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"37c48d2d-df08-4684-8215-be918d17cdae\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.648418 4665 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"37c48d2d-df08-4684-8215-be918d17cdae\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.653258 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/37c48d2d-df08-4684-8215-be918d17cdae-scripts\") pod \"glance-default-internal-api-0\" (UID: \"37c48d2d-df08-4684-8215-be918d17cdae\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.656132 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/37c48d2d-df08-4684-8215-be918d17cdae-config-data\") pod \"glance-default-internal-api-0\" (UID: \"37c48d2d-df08-4684-8215-be918d17cdae\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.663880 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/37c48d2d-df08-4684-8215-be918d17cdae-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"37c48d2d-df08-4684-8215-be918d17cdae\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.665935 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/37c48d2d-df08-4684-8215-be918d17cdae-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"37c48d2d-df08-4684-8215-be918d17cdae\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.666925 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2nb4\" (UniqueName: \"kubernetes.io/projected/37c48d2d-df08-4684-8215-be918d17cdae-kube-api-access-g2nb4\") pod 
\"glance-default-internal-api-0\" (UID: \"37c48d2d-df08-4684-8215-be918d17cdae\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.678219 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-internal-api-0\" (UID: \"37c48d2d-df08-4684-8215-be918d17cdae\") " pod="openstack/glance-default-internal-api-0" Dec 05 01:33:39 crc kubenswrapper[4665]: I1205 01:33:39.816213 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 01:33:40 crc kubenswrapper[4665]: I1205 01:33:40.397780 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-cclzq"] Dec 05 01:33:40 crc kubenswrapper[4665]: I1205 01:33:40.405939 4665 generic.go:334] "Generic (PLEG): container finished" podID="0471ec06-ae39-4f60-923a-6d6d4635dbe1" containerID="24f8dfbb6e9b9bb8b4d402bf72ca78295df00af189694c290b90a95382f35277" exitCode=0 Dec 05 01:33:40 crc kubenswrapper[4665]: I1205 01:33:40.406217 4665 generic.go:334] "Generic (PLEG): container finished" podID="0471ec06-ae39-4f60-923a-6d6d4635dbe1" containerID="baae0fb71e4a447ead02522b8a8afb7d81c58209a6198b31499cdbd58790096b" exitCode=2 Dec 05 01:33:40 crc kubenswrapper[4665]: I1205 01:33:40.406226 4665 generic.go:334] "Generic (PLEG): container finished" podID="0471ec06-ae39-4f60-923a-6d6d4635dbe1" containerID="655450280d3ce5bee0f233147ea3ee5053017f29c63c86275db3b4ea81d3b514" exitCode=0 Dec 05 01:33:40 crc kubenswrapper[4665]: I1205 01:33:40.406265 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0471ec06-ae39-4f60-923a-6d6d4635dbe1","Type":"ContainerDied","Data":"24f8dfbb6e9b9bb8b4d402bf72ca78295df00af189694c290b90a95382f35277"} Dec 05 01:33:40 crc kubenswrapper[4665]: I1205 01:33:40.406313 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0471ec06-ae39-4f60-923a-6d6d4635dbe1","Type":"ContainerDied","Data":"baae0fb71e4a447ead02522b8a8afb7d81c58209a6198b31499cdbd58790096b"} Dec 05 01:33:40 crc kubenswrapper[4665]: I1205 01:33:40.406323 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0471ec06-ae39-4f60-923a-6d6d4635dbe1","Type":"ContainerDied","Data":"655450280d3ce5bee0f233147ea3ee5053017f29c63c86275db3b4ea81d3b514"} Dec 05 01:33:40 crc kubenswrapper[4665]: W1205 01:33:40.414683 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4ff4178e_958a_443a_9dc6_dc354c11fa89.slice/crio-16b2aa4d49181119da8445d80f7946faf2f12afa3b7f8f7c9a3d5db00141e18e WatchSource:0}: Error finding container 16b2aa4d49181119da8445d80f7946faf2f12afa3b7f8f7c9a3d5db00141e18e: Status 404 returned error can't find the container with id 16b2aa4d49181119da8445d80f7946faf2f12afa3b7f8f7c9a3d5db00141e18e Dec 05 01:33:40 crc kubenswrapper[4665]: I1205 01:33:40.596257 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:40 crc kubenswrapper[4665]: I1205 01:33:40.598755 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5f4f69cd4c-jp87r" Dec 05 01:33:40 crc kubenswrapper[4665]: I1205 01:33:40.599707 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/glance-default-internal-api-0"] Dec 05 01:33:40 crc kubenswrapper[4665]: W1205 01:33:40.610249 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod37c48d2d_df08_4684_8215_be918d17cdae.slice/crio-468bca64040764955abb144f3764c600d795458d0ab2eda65e744220780ffc20 WatchSource:0}: Error finding container 468bca64040764955abb144f3764c600d795458d0ab2eda65e744220780ffc20: Status 404 returned error can't find the container with id 468bca64040764955abb144f3764c600d795458d0ab2eda65e744220780ffc20 Dec 05 01:33:40 crc kubenswrapper[4665]: I1205 01:33:40.908410 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f7b0e53d-dba2-440d-844e-dd4ca34f1895" path="/var/lib/kubelet/pods/f7b0e53d-dba2-440d-844e-dd4ca34f1895/volumes" Dec 05 01:33:41 crc kubenswrapper[4665]: I1205 01:33:41.450566 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"37c48d2d-df08-4684-8215-be918d17cdae","Type":"ContainerStarted","Data":"468bca64040764955abb144f3764c600d795458d0ab2eda65e744220780ffc20"} Dec 05 01:33:41 crc kubenswrapper[4665]: I1205 01:33:41.453257 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9","Type":"ContainerStarted","Data":"4d3e2114db366b1d364bf721dbf25fc841f5264a64eab7296a374b6122a1045e"} Dec 05 01:33:41 crc kubenswrapper[4665]: I1205 01:33:41.457491 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-cclzq" event={"ID":"4ff4178e-958a-443a-9dc6-dc354c11fa89","Type":"ContainerStarted","Data":"16b2aa4d49181119da8445d80f7946faf2f12afa3b7f8f7c9a3d5db00141e18e"} Dec 05 01:33:42 crc kubenswrapper[4665]: I1205 01:33:42.342963 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:33:42 crc kubenswrapper[4665]: I1205 01:33:42.343380 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:33:42 crc kubenswrapper[4665]: I1205 01:33:42.472420 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"37c48d2d-df08-4684-8215-be918d17cdae","Type":"ContainerStarted","Data":"b1ba8b4ebea63fc010fa5c628ba05107b6dea60013efc131a5a619f9a3778326"} Dec 05 01:33:42 crc kubenswrapper[4665]: I1205 01:33:42.472469 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"37c48d2d-df08-4684-8215-be918d17cdae","Type":"ContainerStarted","Data":"a5ce71cd1d4c99c91f701be4368b29e6a7265f8246f41936ad18296881eddbc5"} Dec 05 01:33:42 crc kubenswrapper[4665]: I1205 01:33:42.475757 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9","Type":"ContainerStarted","Data":"bb5b515a85988fb862bdd06475442d70b964c40974fedadaf01a6a51309cfde4"} Dec 05 01:33:42 crc kubenswrapper[4665]: I1205 01:33:42.484222 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:33:42 crc kubenswrapper[4665]: I1205 01:33:42.484252 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:33:42 crc kubenswrapper[4665]: I1205 01:33:42.501168 4665 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=3.50115394 podStartE2EDuration="3.50115394s" podCreationTimestamp="2025-12-05 01:33:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:33:42.494222743 +0000 UTC m=+1397.833615052" watchObservedRunningTime="2025-12-05 01:33:42.50115394 +0000 UTC m=+1397.840546239" Dec 05 01:33:42 crc kubenswrapper[4665]: I1205 01:33:42.517227 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=5.517209608 podStartE2EDuration="5.517209608s" podCreationTimestamp="2025-12-05 01:33:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:33:42.515245481 +0000 UTC m=+1397.854637780" watchObservedRunningTime="2025-12-05 01:33:42.517209608 +0000 UTC m=+1397.856601897" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.126207 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.237902 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0471ec06-ae39-4f60-923a-6d6d4635dbe1-log-httpd\") pod \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.238245 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0471ec06-ae39-4f60-923a-6d6d4635dbe1-config-data\") pod \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.238274 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0471ec06-ae39-4f60-923a-6d6d4635dbe1-run-httpd\") pod \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.238397 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t82qw\" (UniqueName: \"kubernetes.io/projected/0471ec06-ae39-4f60-923a-6d6d4635dbe1-kube-api-access-t82qw\") pod \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.238456 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0471ec06-ae39-4f60-923a-6d6d4635dbe1-combined-ca-bundle\") pod \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.238487 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0471ec06-ae39-4f60-923a-6d6d4635dbe1-sg-core-conf-yaml\") pod \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.238590 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/0471ec06-ae39-4f60-923a-6d6d4635dbe1-scripts\") pod \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\" (UID: \"0471ec06-ae39-4f60-923a-6d6d4635dbe1\") " Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.238904 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0471ec06-ae39-4f60-923a-6d6d4635dbe1-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "0471ec06-ae39-4f60-923a-6d6d4635dbe1" (UID: "0471ec06-ae39-4f60-923a-6d6d4635dbe1"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.239359 4665 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0471ec06-ae39-4f60-923a-6d6d4635dbe1-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.240026 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0471ec06-ae39-4f60-923a-6d6d4635dbe1-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "0471ec06-ae39-4f60-923a-6d6d4635dbe1" (UID: "0471ec06-ae39-4f60-923a-6d6d4635dbe1"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.253443 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0471ec06-ae39-4f60-923a-6d6d4635dbe1-scripts" (OuterVolumeSpecName: "scripts") pod "0471ec06-ae39-4f60-923a-6d6d4635dbe1" (UID: "0471ec06-ae39-4f60-923a-6d6d4635dbe1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.254878 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0471ec06-ae39-4f60-923a-6d6d4635dbe1-kube-api-access-t82qw" (OuterVolumeSpecName: "kube-api-access-t82qw") pod "0471ec06-ae39-4f60-923a-6d6d4635dbe1" (UID: "0471ec06-ae39-4f60-923a-6d6d4635dbe1"). InnerVolumeSpecName "kube-api-access-t82qw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.282313 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0471ec06-ae39-4f60-923a-6d6d4635dbe1-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "0471ec06-ae39-4f60-923a-6d6d4635dbe1" (UID: "0471ec06-ae39-4f60-923a-6d6d4635dbe1"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.340501 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/0471ec06-ae39-4f60-923a-6d6d4635dbe1-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.340531 4665 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/0471ec06-ae39-4f60-923a-6d6d4635dbe1-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.340541 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t82qw\" (UniqueName: \"kubernetes.io/projected/0471ec06-ae39-4f60-923a-6d6d4635dbe1-kube-api-access-t82qw\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.340551 4665 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/0471ec06-ae39-4f60-923a-6d6d4635dbe1-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.362813 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0471ec06-ae39-4f60-923a-6d6d4635dbe1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0471ec06-ae39-4f60-923a-6d6d4635dbe1" (UID: "0471ec06-ae39-4f60-923a-6d6d4635dbe1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.433547 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0471ec06-ae39-4f60-923a-6d6d4635dbe1-config-data" (OuterVolumeSpecName: "config-data") pod "0471ec06-ae39-4f60-923a-6d6d4635dbe1" (UID: "0471ec06-ae39-4f60-923a-6d6d4635dbe1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.442551 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0471ec06-ae39-4f60-923a-6d6d4635dbe1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.442589 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0471ec06-ae39-4f60-923a-6d6d4635dbe1-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.500275 4665 generic.go:334] "Generic (PLEG): container finished" podID="0471ec06-ae39-4f60-923a-6d6d4635dbe1" containerID="55fee502257aeffa50ad3d6d72b95355576bb32f4163de8406e57cd76e44b47a" exitCode=0 Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.500372 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0471ec06-ae39-4f60-923a-6d6d4635dbe1","Type":"ContainerDied","Data":"55fee502257aeffa50ad3d6d72b95355576bb32f4163de8406e57cd76e44b47a"} Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.500432 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"0471ec06-ae39-4f60-923a-6d6d4635dbe1","Type":"ContainerDied","Data":"05ed3c26b705190a2be5797d0cbb75bcd581ef1cf1f58175dd297d176df75ef6"} Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.500457 4665 scope.go:117] "RemoveContainer" containerID="24f8dfbb6e9b9bb8b4d402bf72ca78295df00af189694c290b90a95382f35277" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.500737 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.543133 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.565973 4665 scope.go:117] "RemoveContainer" containerID="baae0fb71e4a447ead02522b8a8afb7d81c58209a6198b31499cdbd58790096b" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.566443 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.619484 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:33:44 crc kubenswrapper[4665]: E1205 01:33:44.619881 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0471ec06-ae39-4f60-923a-6d6d4635dbe1" containerName="ceilometer-central-agent" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.619899 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="0471ec06-ae39-4f60-923a-6d6d4635dbe1" containerName="ceilometer-central-agent" Dec 05 01:33:44 crc kubenswrapper[4665]: E1205 01:33:44.619911 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0471ec06-ae39-4f60-923a-6d6d4635dbe1" containerName="proxy-httpd" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.619917 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="0471ec06-ae39-4f60-923a-6d6d4635dbe1" containerName="proxy-httpd" Dec 05 01:33:44 crc kubenswrapper[4665]: E1205 01:33:44.619944 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0471ec06-ae39-4f60-923a-6d6d4635dbe1" containerName="ceilometer-notification-agent" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.619950 4665 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="0471ec06-ae39-4f60-923a-6d6d4635dbe1" containerName="ceilometer-notification-agent" Dec 05 01:33:44 crc kubenswrapper[4665]: E1205 01:33:44.619960 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0471ec06-ae39-4f60-923a-6d6d4635dbe1" containerName="sg-core" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.619966 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="0471ec06-ae39-4f60-923a-6d6d4635dbe1" containerName="sg-core" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.620131 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="0471ec06-ae39-4f60-923a-6d6d4635dbe1" containerName="ceilometer-central-agent" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.620144 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="0471ec06-ae39-4f60-923a-6d6d4635dbe1" containerName="proxy-httpd" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.620162 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="0471ec06-ae39-4f60-923a-6d6d4635dbe1" containerName="sg-core" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.620172 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="0471ec06-ae39-4f60-923a-6d6d4635dbe1" containerName="ceilometer-notification-agent" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.621974 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.624793 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.624979 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.632215 4665 scope.go:117] "RemoveContainer" containerID="655450280d3ce5bee0f233147ea3ee5053017f29c63c86275db3b4ea81d3b514" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.638199 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.663666 4665 scope.go:117] "RemoveContainer" containerID="55fee502257aeffa50ad3d6d72b95355576bb32f4163de8406e57cd76e44b47a" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.695524 4665 scope.go:117] "RemoveContainer" containerID="24f8dfbb6e9b9bb8b4d402bf72ca78295df00af189694c290b90a95382f35277" Dec 05 01:33:44 crc kubenswrapper[4665]: E1205 01:33:44.695918 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"24f8dfbb6e9b9bb8b4d402bf72ca78295df00af189694c290b90a95382f35277\": container with ID starting with 24f8dfbb6e9b9bb8b4d402bf72ca78295df00af189694c290b90a95382f35277 not found: ID does not exist" containerID="24f8dfbb6e9b9bb8b4d402bf72ca78295df00af189694c290b90a95382f35277" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.695995 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"24f8dfbb6e9b9bb8b4d402bf72ca78295df00af189694c290b90a95382f35277"} err="failed to get container status \"24f8dfbb6e9b9bb8b4d402bf72ca78295df00af189694c290b90a95382f35277\": rpc error: code = NotFound desc = could not find container \"24f8dfbb6e9b9bb8b4d402bf72ca78295df00af189694c290b90a95382f35277\": container with ID starting with 24f8dfbb6e9b9bb8b4d402bf72ca78295df00af189694c290b90a95382f35277 not found: ID does not exist" Dec 05 01:33:44 
crc kubenswrapper[4665]: I1205 01:33:44.696028 4665 scope.go:117] "RemoveContainer" containerID="baae0fb71e4a447ead02522b8a8afb7d81c58209a6198b31499cdbd58790096b" Dec 05 01:33:44 crc kubenswrapper[4665]: E1205 01:33:44.696479 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"baae0fb71e4a447ead02522b8a8afb7d81c58209a6198b31499cdbd58790096b\": container with ID starting with baae0fb71e4a447ead02522b8a8afb7d81c58209a6198b31499cdbd58790096b not found: ID does not exist" containerID="baae0fb71e4a447ead02522b8a8afb7d81c58209a6198b31499cdbd58790096b" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.696500 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"baae0fb71e4a447ead02522b8a8afb7d81c58209a6198b31499cdbd58790096b"} err="failed to get container status \"baae0fb71e4a447ead02522b8a8afb7d81c58209a6198b31499cdbd58790096b\": rpc error: code = NotFound desc = could not find container \"baae0fb71e4a447ead02522b8a8afb7d81c58209a6198b31499cdbd58790096b\": container with ID starting with baae0fb71e4a447ead02522b8a8afb7d81c58209a6198b31499cdbd58790096b not found: ID does not exist" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.696515 4665 scope.go:117] "RemoveContainer" containerID="655450280d3ce5bee0f233147ea3ee5053017f29c63c86275db3b4ea81d3b514" Dec 05 01:33:44 crc kubenswrapper[4665]: E1205 01:33:44.696700 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"655450280d3ce5bee0f233147ea3ee5053017f29c63c86275db3b4ea81d3b514\": container with ID starting with 655450280d3ce5bee0f233147ea3ee5053017f29c63c86275db3b4ea81d3b514 not found: ID does not exist" containerID="655450280d3ce5bee0f233147ea3ee5053017f29c63c86275db3b4ea81d3b514" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.696723 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"655450280d3ce5bee0f233147ea3ee5053017f29c63c86275db3b4ea81d3b514"} err="failed to get container status \"655450280d3ce5bee0f233147ea3ee5053017f29c63c86275db3b4ea81d3b514\": rpc error: code = NotFound desc = could not find container \"655450280d3ce5bee0f233147ea3ee5053017f29c63c86275db3b4ea81d3b514\": container with ID starting with 655450280d3ce5bee0f233147ea3ee5053017f29c63c86275db3b4ea81d3b514 not found: ID does not exist" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.696736 4665 scope.go:117] "RemoveContainer" containerID="55fee502257aeffa50ad3d6d72b95355576bb32f4163de8406e57cd76e44b47a" Dec 05 01:33:44 crc kubenswrapper[4665]: E1205 01:33:44.696936 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"55fee502257aeffa50ad3d6d72b95355576bb32f4163de8406e57cd76e44b47a\": container with ID starting with 55fee502257aeffa50ad3d6d72b95355576bb32f4163de8406e57cd76e44b47a not found: ID does not exist" containerID="55fee502257aeffa50ad3d6d72b95355576bb32f4163de8406e57cd76e44b47a" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.696960 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"55fee502257aeffa50ad3d6d72b95355576bb32f4163de8406e57cd76e44b47a"} err="failed to get container status \"55fee502257aeffa50ad3d6d72b95355576bb32f4163de8406e57cd76e44b47a\": rpc error: code = NotFound desc = could not find container 
\"55fee502257aeffa50ad3d6d72b95355576bb32f4163de8406e57cd76e44b47a\": container with ID starting with 55fee502257aeffa50ad3d6d72b95355576bb32f4163de8406e57cd76e44b47a not found: ID does not exist" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.750772 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/604e32db-4501-4f89-995b-c500764ab2c0-config-data\") pod \"ceilometer-0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " pod="openstack/ceilometer-0" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.750840 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/604e32db-4501-4f89-995b-c500764ab2c0-scripts\") pod \"ceilometer-0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " pod="openstack/ceilometer-0" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.750893 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/604e32db-4501-4f89-995b-c500764ab2c0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " pod="openstack/ceilometer-0" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.750998 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2gfph\" (UniqueName: \"kubernetes.io/projected/604e32db-4501-4f89-995b-c500764ab2c0-kube-api-access-2gfph\") pod \"ceilometer-0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " pod="openstack/ceilometer-0" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.751074 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/604e32db-4501-4f89-995b-c500764ab2c0-run-httpd\") pod \"ceilometer-0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " pod="openstack/ceilometer-0" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.751149 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/604e32db-4501-4f89-995b-c500764ab2c0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " pod="openstack/ceilometer-0" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.751211 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/604e32db-4501-4f89-995b-c500764ab2c0-log-httpd\") pod \"ceilometer-0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " pod="openstack/ceilometer-0" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.853374 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/604e32db-4501-4f89-995b-c500764ab2c0-run-httpd\") pod \"ceilometer-0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " pod="openstack/ceilometer-0" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.853636 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/604e32db-4501-4f89-995b-c500764ab2c0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " pod="openstack/ceilometer-0" Dec 05 01:33:44 crc kubenswrapper[4665]: 
I1205 01:33:44.853814 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/604e32db-4501-4f89-995b-c500764ab2c0-log-httpd\") pod \"ceilometer-0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " pod="openstack/ceilometer-0" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.853943 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/604e32db-4501-4f89-995b-c500764ab2c0-config-data\") pod \"ceilometer-0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " pod="openstack/ceilometer-0" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.854084 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/604e32db-4501-4f89-995b-c500764ab2c0-scripts\") pod \"ceilometer-0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " pod="openstack/ceilometer-0" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.854232 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/604e32db-4501-4f89-995b-c500764ab2c0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " pod="openstack/ceilometer-0" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.854406 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2gfph\" (UniqueName: \"kubernetes.io/projected/604e32db-4501-4f89-995b-c500764ab2c0-kube-api-access-2gfph\") pod \"ceilometer-0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " pod="openstack/ceilometer-0" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.854260 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/604e32db-4501-4f89-995b-c500764ab2c0-run-httpd\") pod \"ceilometer-0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " pod="openstack/ceilometer-0" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.854789 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/604e32db-4501-4f89-995b-c500764ab2c0-log-httpd\") pod \"ceilometer-0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " pod="openstack/ceilometer-0" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.861493 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/604e32db-4501-4f89-995b-c500764ab2c0-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " pod="openstack/ceilometer-0" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.865721 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/604e32db-4501-4f89-995b-c500764ab2c0-config-data\") pod \"ceilometer-0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " pod="openstack/ceilometer-0" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.871070 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/604e32db-4501-4f89-995b-c500764ab2c0-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " pod="openstack/ceilometer-0" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.876735 4665 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-2gfph\" (UniqueName: \"kubernetes.io/projected/604e32db-4501-4f89-995b-c500764ab2c0-kube-api-access-2gfph\") pod \"ceilometer-0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " pod="openstack/ceilometer-0" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.877417 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/604e32db-4501-4f89-995b-c500764ab2c0-scripts\") pod \"ceilometer-0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " pod="openstack/ceilometer-0" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.921935 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.922159 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.923104 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0471ec06-ae39-4f60-923a-6d6d4635dbe1" path="/var/lib/kubelet/pods/0471ec06-ae39-4f60-923a-6d6d4635dbe1/volumes" Dec 05 01:33:44 crc kubenswrapper[4665]: I1205 01:33:44.942982 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:33:48 crc kubenswrapper[4665]: I1205 01:33:48.214486 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 01:33:48 crc kubenswrapper[4665]: I1205 01:33:48.215783 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 01:33:48 crc kubenswrapper[4665]: I1205 01:33:48.257385 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 01:33:48 crc kubenswrapper[4665]: I1205 01:33:48.261908 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 01:33:48 crc kubenswrapper[4665]: I1205 01:33:48.543934 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 01:33:48 crc kubenswrapper[4665]: I1205 01:33:48.544255 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 01:33:49 crc kubenswrapper[4665]: I1205 01:33:49.817083 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 01:33:49 crc kubenswrapper[4665]: I1205 01:33:49.817120 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 01:33:49 crc kubenswrapper[4665]: I1205 01:33:49.884220 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 01:33:49 crc kubenswrapper[4665]: I1205 01:33:49.885146 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openstack/glance-default-internal-api-0" Dec 05 01:33:50 crc kubenswrapper[4665]: I1205 01:33:50.307646 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:33:50 crc kubenswrapper[4665]: I1205 01:33:50.565952 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 01:33:50 crc kubenswrapper[4665]: I1205 01:33:50.566133 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 01:33:51 crc kubenswrapper[4665]: I1205 01:33:51.684937 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 01:33:51 crc kubenswrapper[4665]: I1205 01:33:51.685531 4665 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 01:33:51 crc kubenswrapper[4665]: I1205 01:33:51.843317 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 01:33:51 crc kubenswrapper[4665]: I1205 01:33:51.961830 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:33:52 crc kubenswrapper[4665]: I1205 01:33:52.345506 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-86cd4c9876-glfvx" podUID="2178a916-adc5-4ff5-8972-30b105320f5f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.148:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.148:8443: connect: connection refused" Dec 05 01:33:52 crc kubenswrapper[4665]: I1205 01:33:52.486179 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-644f785f4-mslbg" podUID="64407a72-3fdf-450f-b5c0-913ee74bb437" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.149:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.149:8443: connect: connection refused" Dec 05 01:33:52 crc kubenswrapper[4665]: I1205 01:33:52.594994 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"604e32db-4501-4f89-995b-c500764ab2c0","Type":"ContainerStarted","Data":"3c0b419c053c341eb6cd4bbe4cf122cf2154fe883558c3f809f92c25017eb5f9"} Dec 05 01:33:52 crc kubenswrapper[4665]: I1205 01:33:52.597360 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-cclzq" event={"ID":"4ff4178e-958a-443a-9dc6-dc354c11fa89","Type":"ContainerStarted","Data":"c29d41fc6fc128c90014b25869450012b9718e0d40c2f09b76b54dc0855011bd"} Dec 05 01:33:52 crc kubenswrapper[4665]: I1205 01:33:52.597477 4665 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 01:33:52 crc kubenswrapper[4665]: I1205 01:33:52.597504 4665 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 01:33:52 crc kubenswrapper[4665]: I1205 01:33:52.614898 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-cclzq" podStartSLOduration=3.605366851 podStartE2EDuration="14.61488152s" podCreationTimestamp="2025-12-05 01:33:38 +0000 UTC" firstStartedPulling="2025-12-05 01:33:40.418955914 +0000 UTC m=+1395.758348213" lastFinishedPulling="2025-12-05 01:33:51.428470593 +0000 UTC m=+1406.767862882" observedRunningTime="2025-12-05 01:33:52.612381391 +0000 UTC m=+1407.951773690" watchObservedRunningTime="2025-12-05 01:33:52.61488152 +0000 UTC m=+1407.954273819" Dec 05 01:33:53 crc 
kubenswrapper[4665]: I1205 01:33:53.606122 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"604e32db-4501-4f89-995b-c500764ab2c0","Type":"ContainerStarted","Data":"c81224db8ef2d3f839612b5de41c757fa0d26bf249e3e4b30cf320f71cce2b63"} Dec 05 01:33:54 crc kubenswrapper[4665]: I1205 01:33:54.407771 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 01:33:54 crc kubenswrapper[4665]: I1205 01:33:54.408179 4665 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 01:33:54 crc kubenswrapper[4665]: I1205 01:33:54.418068 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 01:33:55 crc kubenswrapper[4665]: I1205 01:33:55.621630 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"604e32db-4501-4f89-995b-c500764ab2c0","Type":"ContainerStarted","Data":"c7a131ba233dca94c1d6e1dfb8c7d5fbf11af467fd8e901b4b759378d2c8283d"} Dec 05 01:33:56 crc kubenswrapper[4665]: I1205 01:33:56.637462 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"604e32db-4501-4f89-995b-c500764ab2c0","Type":"ContainerStarted","Data":"83252496c13bc65c37bb543e6425ef0bc3f0f1276093d717bcdf2dbddc0d244d"} Dec 05 01:33:58 crc kubenswrapper[4665]: I1205 01:33:58.661408 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"604e32db-4501-4f89-995b-c500764ab2c0","Type":"ContainerStarted","Data":"f8a17fee4900ea345eb58b62fb93e4fad5ee6ce2aa378454ab7ded28f94a515a"} Dec 05 01:33:58 crc kubenswrapper[4665]: I1205 01:33:58.661584 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="604e32db-4501-4f89-995b-c500764ab2c0" containerName="ceilometer-central-agent" containerID="cri-o://c81224db8ef2d3f839612b5de41c757fa0d26bf249e3e4b30cf320f71cce2b63" gracePeriod=30 Dec 05 01:33:58 crc kubenswrapper[4665]: I1205 01:33:58.661660 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="604e32db-4501-4f89-995b-c500764ab2c0" containerName="proxy-httpd" containerID="cri-o://f8a17fee4900ea345eb58b62fb93e4fad5ee6ce2aa378454ab7ded28f94a515a" gracePeriod=30 Dec 05 01:33:58 crc kubenswrapper[4665]: I1205 01:33:58.661598 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="604e32db-4501-4f89-995b-c500764ab2c0" containerName="sg-core" containerID="cri-o://83252496c13bc65c37bb543e6425ef0bc3f0f1276093d717bcdf2dbddc0d244d" gracePeriod=30 Dec 05 01:33:58 crc kubenswrapper[4665]: I1205 01:33:58.661653 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="604e32db-4501-4f89-995b-c500764ab2c0" containerName="ceilometer-notification-agent" containerID="cri-o://c7a131ba233dca94c1d6e1dfb8c7d5fbf11af467fd8e901b4b759378d2c8283d" gracePeriod=30 Dec 05 01:33:58 crc kubenswrapper[4665]: I1205 01:33:58.661851 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 01:33:58 crc kubenswrapper[4665]: I1205 01:33:58.702545 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=9.101013524 podStartE2EDuration="14.702527949s" podCreationTimestamp="2025-12-05 01:33:44 +0000 UTC" 
firstStartedPulling="2025-12-05 01:33:51.958543597 +0000 UTC m=+1407.297935896" lastFinishedPulling="2025-12-05 01:33:57.560058012 +0000 UTC m=+1412.899450321" observedRunningTime="2025-12-05 01:33:58.698337318 +0000 UTC m=+1414.037729617" watchObservedRunningTime="2025-12-05 01:33:58.702527949 +0000 UTC m=+1414.041920248" Dec 05 01:33:59 crc kubenswrapper[4665]: I1205 01:33:59.672975 4665 generic.go:334] "Generic (PLEG): container finished" podID="604e32db-4501-4f89-995b-c500764ab2c0" containerID="f8a17fee4900ea345eb58b62fb93e4fad5ee6ce2aa378454ab7ded28f94a515a" exitCode=0 Dec 05 01:33:59 crc kubenswrapper[4665]: I1205 01:33:59.673348 4665 generic.go:334] "Generic (PLEG): container finished" podID="604e32db-4501-4f89-995b-c500764ab2c0" containerID="83252496c13bc65c37bb543e6425ef0bc3f0f1276093d717bcdf2dbddc0d244d" exitCode=2 Dec 05 01:33:59 crc kubenswrapper[4665]: I1205 01:33:59.673362 4665 generic.go:334] "Generic (PLEG): container finished" podID="604e32db-4501-4f89-995b-c500764ab2c0" containerID="c7a131ba233dca94c1d6e1dfb8c7d5fbf11af467fd8e901b4b759378d2c8283d" exitCode=0 Dec 05 01:33:59 crc kubenswrapper[4665]: I1205 01:33:59.673044 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"604e32db-4501-4f89-995b-c500764ab2c0","Type":"ContainerDied","Data":"f8a17fee4900ea345eb58b62fb93e4fad5ee6ce2aa378454ab7ded28f94a515a"} Dec 05 01:33:59 crc kubenswrapper[4665]: I1205 01:33:59.673399 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"604e32db-4501-4f89-995b-c500764ab2c0","Type":"ContainerDied","Data":"83252496c13bc65c37bb543e6425ef0bc3f0f1276093d717bcdf2dbddc0d244d"} Dec 05 01:33:59 crc kubenswrapper[4665]: I1205 01:33:59.673418 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"604e32db-4501-4f89-995b-c500764ab2c0","Type":"ContainerDied","Data":"c7a131ba233dca94c1d6e1dfb8c7d5fbf11af467fd8e901b4b759378d2c8283d"} Dec 05 01:34:02 crc kubenswrapper[4665]: I1205 01:34:02.344905 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-86cd4c9876-glfvx" podUID="2178a916-adc5-4ff5-8972-30b105320f5f" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.148:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.148:8443: connect: connection refused" Dec 05 01:34:02 crc kubenswrapper[4665]: I1205 01:34:02.957901 4665 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","pod47a3fd04-4de6-4517-b713-0dc980538bb7"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort pod47a3fd04-4de6-4517-b713-0dc980538bb7] : Timed out while waiting for systemd to remove kubepods-besteffort-pod47a3fd04_4de6_4517_b713_0dc980538bb7.slice" Dec 05 01:34:02 crc kubenswrapper[4665]: E1205 01:34:02.957953 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort pod47a3fd04-4de6-4517-b713-0dc980538bb7] : unable to destroy cgroup paths for cgroup [kubepods besteffort pod47a3fd04-4de6-4517-b713-0dc980538bb7] : Timed out while waiting for systemd to remove kubepods-besteffort-pod47a3fd04_4de6_4517_b713_0dc980538bb7.slice" pod="openstack/cinder-api-0" podUID="47a3fd04-4de6-4517-b713-0dc980538bb7" Dec 05 01:34:03 crc kubenswrapper[4665]: I1205 01:34:03.711361 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 01:34:03 crc kubenswrapper[4665]: I1205 01:34:03.736200 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 05 01:34:03 crc kubenswrapper[4665]: I1205 01:34:03.745352 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 05 01:34:03 crc kubenswrapper[4665]: I1205 01:34:03.763991 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 05 01:34:03 crc kubenswrapper[4665]: I1205 01:34:03.766113 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 01:34:03 crc kubenswrapper[4665]: I1205 01:34:03.768810 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 05 01:34:03 crc kubenswrapper[4665]: I1205 01:34:03.769202 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 05 01:34:03 crc kubenswrapper[4665]: I1205 01:34:03.771809 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 05 01:34:03 crc kubenswrapper[4665]: I1205 01:34:03.779848 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 01:34:03 crc kubenswrapper[4665]: I1205 01:34:03.931553 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/84d298f5-1785-45d9-8195-ae1ba82c398a-public-tls-certs\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" Dec 05 01:34:03 crc kubenswrapper[4665]: I1205 01:34:03.931655 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/84d298f5-1785-45d9-8195-ae1ba82c398a-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" Dec 05 01:34:03 crc kubenswrapper[4665]: I1205 01:34:03.931753 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/84d298f5-1785-45d9-8195-ae1ba82c398a-config-data-custom\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" Dec 05 01:34:03 crc kubenswrapper[4665]: I1205 01:34:03.931801 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/84d298f5-1785-45d9-8195-ae1ba82c398a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" Dec 05 01:34:03 crc kubenswrapper[4665]: I1205 01:34:03.931919 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84d298f5-1785-45d9-8195-ae1ba82c398a-config-data\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" Dec 05 01:34:03 crc kubenswrapper[4665]: I1205 01:34:03.931947 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84d298f5-1785-45d9-8195-ae1ba82c398a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " 
pod="openstack/cinder-api-0" Dec 05 01:34:03 crc kubenswrapper[4665]: I1205 01:34:03.931990 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/84d298f5-1785-45d9-8195-ae1ba82c398a-logs\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" Dec 05 01:34:03 crc kubenswrapper[4665]: I1205 01:34:03.932015 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7bhvw\" (UniqueName: \"kubernetes.io/projected/84d298f5-1785-45d9-8195-ae1ba82c398a-kube-api-access-7bhvw\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" Dec 05 01:34:03 crc kubenswrapper[4665]: I1205 01:34:03.932074 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84d298f5-1785-45d9-8195-ae1ba82c398a-scripts\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" Dec 05 01:34:04 crc kubenswrapper[4665]: I1205 01:34:04.033879 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84d298f5-1785-45d9-8195-ae1ba82c398a-config-data\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" Dec 05 01:34:04 crc kubenswrapper[4665]: I1205 01:34:04.034217 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84d298f5-1785-45d9-8195-ae1ba82c398a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" Dec 05 01:34:04 crc kubenswrapper[4665]: I1205 01:34:04.034418 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/84d298f5-1785-45d9-8195-ae1ba82c398a-logs\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" Dec 05 01:34:04 crc kubenswrapper[4665]: I1205 01:34:04.034480 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7bhvw\" (UniqueName: \"kubernetes.io/projected/84d298f5-1785-45d9-8195-ae1ba82c398a-kube-api-access-7bhvw\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" Dec 05 01:34:04 crc kubenswrapper[4665]: I1205 01:34:04.034527 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84d298f5-1785-45d9-8195-ae1ba82c398a-scripts\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" Dec 05 01:34:04 crc kubenswrapper[4665]: I1205 01:34:04.034793 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/84d298f5-1785-45d9-8195-ae1ba82c398a-public-tls-certs\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" Dec 05 01:34:04 crc kubenswrapper[4665]: I1205 01:34:04.034884 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/84d298f5-1785-45d9-8195-ae1ba82c398a-internal-tls-certs\") pod \"cinder-api-0\" (UID: 
\"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" Dec 05 01:34:04 crc kubenswrapper[4665]: I1205 01:34:04.034964 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/84d298f5-1785-45d9-8195-ae1ba82c398a-config-data-custom\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" Dec 05 01:34:04 crc kubenswrapper[4665]: I1205 01:34:04.035059 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/84d298f5-1785-45d9-8195-ae1ba82c398a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" Dec 05 01:34:04 crc kubenswrapper[4665]: I1205 01:34:04.035190 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/84d298f5-1785-45d9-8195-ae1ba82c398a-etc-machine-id\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" Dec 05 01:34:04 crc kubenswrapper[4665]: I1205 01:34:04.037584 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/84d298f5-1785-45d9-8195-ae1ba82c398a-logs\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" Dec 05 01:34:04 crc kubenswrapper[4665]: I1205 01:34:04.040350 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/84d298f5-1785-45d9-8195-ae1ba82c398a-config-data-custom\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" Dec 05 01:34:04 crc kubenswrapper[4665]: I1205 01:34:04.040564 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/84d298f5-1785-45d9-8195-ae1ba82c398a-config-data\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" Dec 05 01:34:04 crc kubenswrapper[4665]: I1205 01:34:04.040699 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/84d298f5-1785-45d9-8195-ae1ba82c398a-scripts\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" Dec 05 01:34:04 crc kubenswrapper[4665]: I1205 01:34:04.042443 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84d298f5-1785-45d9-8195-ae1ba82c398a-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" Dec 05 01:34:04 crc kubenswrapper[4665]: I1205 01:34:04.048835 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/84d298f5-1785-45d9-8195-ae1ba82c398a-public-tls-certs\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" Dec 05 01:34:04 crc kubenswrapper[4665]: I1205 01:34:04.064858 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/84d298f5-1785-45d9-8195-ae1ba82c398a-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" 
Dec 05 01:34:04 crc kubenswrapper[4665]: I1205 01:34:04.069375 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7bhvw\" (UniqueName: \"kubernetes.io/projected/84d298f5-1785-45d9-8195-ae1ba82c398a-kube-api-access-7bhvw\") pod \"cinder-api-0\" (UID: \"84d298f5-1785-45d9-8195-ae1ba82c398a\") " pod="openstack/cinder-api-0" Dec 05 01:34:04 crc kubenswrapper[4665]: I1205 01:34:04.085433 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 01:34:04 crc kubenswrapper[4665]: I1205 01:34:04.611383 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 01:34:04 crc kubenswrapper[4665]: I1205 01:34:04.721943 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"84d298f5-1785-45d9-8195-ae1ba82c398a","Type":"ContainerStarted","Data":"afbb882338a8dcda09a983e1d27661b0a62a6d85d96e238d34977082379bff5e"} Dec 05 01:34:04 crc kubenswrapper[4665]: I1205 01:34:04.910895 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="47a3fd04-4de6-4517-b713-0dc980538bb7" path="/var/lib/kubelet/pods/47a3fd04-4de6-4517-b713-0dc980538bb7/volumes" Dec 05 01:34:05 crc kubenswrapper[4665]: I1205 01:34:05.425817 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:34:05 crc kubenswrapper[4665]: I1205 01:34:05.758901 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"84d298f5-1785-45d9-8195-ae1ba82c398a","Type":"ContainerStarted","Data":"b548b3f5934eec30e1f1b2116c986b2050d12a3769fdfe358ec0631b9cec3f07"} Dec 05 01:34:06 crc kubenswrapper[4665]: I1205 01:34:06.775476 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"604e32db-4501-4f89-995b-c500764ab2c0","Type":"ContainerDied","Data":"c81224db8ef2d3f839612b5de41c757fa0d26bf249e3e4b30cf320f71cce2b63"} Dec 05 01:34:06 crc kubenswrapper[4665]: I1205 01:34:06.775503 4665 generic.go:334] "Generic (PLEG): container finished" podID="604e32db-4501-4f89-995b-c500764ab2c0" containerID="c81224db8ef2d3f839612b5de41c757fa0d26bf249e3e4b30cf320f71cce2b63" exitCode=0 Dec 05 01:34:06 crc kubenswrapper[4665]: I1205 01:34:06.782256 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"84d298f5-1785-45d9-8195-ae1ba82c398a","Type":"ContainerStarted","Data":"5d2ea9386d05ace368f6da040768990a8885d67de4807708581c59f28c32979f"} Dec 05 01:34:06 crc kubenswrapper[4665]: I1205 01:34:06.782605 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 05 01:34:06 crc kubenswrapper[4665]: I1205 01:34:06.789183 4665 generic.go:334] "Generic (PLEG): container finished" podID="4ff4178e-958a-443a-9dc6-dc354c11fa89" containerID="c29d41fc6fc128c90014b25869450012b9718e0d40c2f09b76b54dc0855011bd" exitCode=0 Dec 05 01:34:06 crc kubenswrapper[4665]: I1205 01:34:06.789246 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-cclzq" event={"ID":"4ff4178e-958a-443a-9dc6-dc354c11fa89","Type":"ContainerDied","Data":"c29d41fc6fc128c90014b25869450012b9718e0d40c2f09b76b54dc0855011bd"} Dec 05 01:34:06 crc kubenswrapper[4665]: I1205 01:34:06.851059 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.851038978 podStartE2EDuration="3.851038978s" podCreationTimestamp="2025-12-05 
01:34:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:34:06.811601136 +0000 UTC m=+1422.150993435" watchObservedRunningTime="2025-12-05 01:34:06.851038978 +0000 UTC m=+1422.190431277" Dec 05 01:34:06 crc kubenswrapper[4665]: E1205 01:34:06.978209 4665 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod604e32db_4501_4f89_995b_c500764ab2c0.slice/crio-conmon-c81224db8ef2d3f839612b5de41c757fa0d26bf249e3e4b30cf320f71cce2b63.scope\": RecentStats: unable to find data in memory cache]" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.084786 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.216484 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2gfph\" (UniqueName: \"kubernetes.io/projected/604e32db-4501-4f89-995b-c500764ab2c0-kube-api-access-2gfph\") pod \"604e32db-4501-4f89-995b-c500764ab2c0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.216553 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/604e32db-4501-4f89-995b-c500764ab2c0-combined-ca-bundle\") pod \"604e32db-4501-4f89-995b-c500764ab2c0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.216578 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/604e32db-4501-4f89-995b-c500764ab2c0-sg-core-conf-yaml\") pod \"604e32db-4501-4f89-995b-c500764ab2c0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.216672 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/604e32db-4501-4f89-995b-c500764ab2c0-log-httpd\") pod \"604e32db-4501-4f89-995b-c500764ab2c0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.216761 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/604e32db-4501-4f89-995b-c500764ab2c0-run-httpd\") pod \"604e32db-4501-4f89-995b-c500764ab2c0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.216869 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/604e32db-4501-4f89-995b-c500764ab2c0-config-data\") pod \"604e32db-4501-4f89-995b-c500764ab2c0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.216909 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/604e32db-4501-4f89-995b-c500764ab2c0-scripts\") pod \"604e32db-4501-4f89-995b-c500764ab2c0\" (UID: \"604e32db-4501-4f89-995b-c500764ab2c0\") " Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.217161 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/604e32db-4501-4f89-995b-c500764ab2c0-run-httpd" 
(OuterVolumeSpecName: "run-httpd") pod "604e32db-4501-4f89-995b-c500764ab2c0" (UID: "604e32db-4501-4f89-995b-c500764ab2c0"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.217323 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/604e32db-4501-4f89-995b-c500764ab2c0-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "604e32db-4501-4f89-995b-c500764ab2c0" (UID: "604e32db-4501-4f89-995b-c500764ab2c0"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.217541 4665 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/604e32db-4501-4f89-995b-c500764ab2c0-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.217561 4665 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/604e32db-4501-4f89-995b-c500764ab2c0-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.227618 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/604e32db-4501-4f89-995b-c500764ab2c0-scripts" (OuterVolumeSpecName: "scripts") pod "604e32db-4501-4f89-995b-c500764ab2c0" (UID: "604e32db-4501-4f89-995b-c500764ab2c0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.243539 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/604e32db-4501-4f89-995b-c500764ab2c0-kube-api-access-2gfph" (OuterVolumeSpecName: "kube-api-access-2gfph") pod "604e32db-4501-4f89-995b-c500764ab2c0" (UID: "604e32db-4501-4f89-995b-c500764ab2c0"). InnerVolumeSpecName "kube-api-access-2gfph". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.252195 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/604e32db-4501-4f89-995b-c500764ab2c0-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "604e32db-4501-4f89-995b-c500764ab2c0" (UID: "604e32db-4501-4f89-995b-c500764ab2c0"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.292620 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/604e32db-4501-4f89-995b-c500764ab2c0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "604e32db-4501-4f89-995b-c500764ab2c0" (UID: "604e32db-4501-4f89-995b-c500764ab2c0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.319204 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2gfph\" (UniqueName: \"kubernetes.io/projected/604e32db-4501-4f89-995b-c500764ab2c0-kube-api-access-2gfph\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.319231 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/604e32db-4501-4f89-995b-c500764ab2c0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.319242 4665 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/604e32db-4501-4f89-995b-c500764ab2c0-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.319251 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/604e32db-4501-4f89-995b-c500764ab2c0-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.336655 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/604e32db-4501-4f89-995b-c500764ab2c0-config-data" (OuterVolumeSpecName: "config-data") pod "604e32db-4501-4f89-995b-c500764ab2c0" (UID: "604e32db-4501-4f89-995b-c500764ab2c0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.379403 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-644f785f4-mslbg" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.420695 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/604e32db-4501-4f89-995b-c500764ab2c0-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.449704 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-86cd4c9876-glfvx"] Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.449937 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-86cd4c9876-glfvx" podUID="2178a916-adc5-4ff5-8972-30b105320f5f" containerName="horizon-log" containerID="cri-o://0668f57d4a1180cd02471099a7d09021483214f428c5cf14964b853a82613c31" gracePeriod=30 Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.450327 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-86cd4c9876-glfvx" podUID="2178a916-adc5-4ff5-8972-30b105320f5f" containerName="horizon" containerID="cri-o://dffd3d080b4583df799e6005bb5d553f12161985481db6c5307b59ee304b0ada" gracePeriod=30 Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.801285 4665 generic.go:334] "Generic (PLEG): container finished" podID="2178a916-adc5-4ff5-8972-30b105320f5f" containerID="dffd3d080b4583df799e6005bb5d553f12161985481db6c5307b59ee304b0ada" exitCode=0 Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.801344 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-86cd4c9876-glfvx" event={"ID":"2178a916-adc5-4ff5-8972-30b105320f5f","Type":"ContainerDied","Data":"dffd3d080b4583df799e6005bb5d553f12161985481db6c5307b59ee304b0ada"} Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.801671 4665 scope.go:117] "RemoveContainer" 
containerID="5fee73ab6bf34e37ceb73d73cdc466ea8680df6b74400f1cd845b17c4378965f" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.804945 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.804943 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"604e32db-4501-4f89-995b-c500764ab2c0","Type":"ContainerDied","Data":"3c0b419c053c341eb6cd4bbe4cf122cf2154fe883558c3f809f92c25017eb5f9"} Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.847989 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.865719 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.879553 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:34:07 crc kubenswrapper[4665]: E1205 01:34:07.879958 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="604e32db-4501-4f89-995b-c500764ab2c0" containerName="ceilometer-notification-agent" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.879973 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="604e32db-4501-4f89-995b-c500764ab2c0" containerName="ceilometer-notification-agent" Dec 05 01:34:07 crc kubenswrapper[4665]: E1205 01:34:07.879987 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="604e32db-4501-4f89-995b-c500764ab2c0" containerName="proxy-httpd" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.879993 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="604e32db-4501-4f89-995b-c500764ab2c0" containerName="proxy-httpd" Dec 05 01:34:07 crc kubenswrapper[4665]: E1205 01:34:07.880015 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="604e32db-4501-4f89-995b-c500764ab2c0" containerName="ceilometer-central-agent" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.880021 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="604e32db-4501-4f89-995b-c500764ab2c0" containerName="ceilometer-central-agent" Dec 05 01:34:07 crc kubenswrapper[4665]: E1205 01:34:07.880032 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="604e32db-4501-4f89-995b-c500764ab2c0" containerName="sg-core" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.880040 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="604e32db-4501-4f89-995b-c500764ab2c0" containerName="sg-core" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.880221 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="604e32db-4501-4f89-995b-c500764ab2c0" containerName="proxy-httpd" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.880249 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="604e32db-4501-4f89-995b-c500764ab2c0" containerName="ceilometer-central-agent" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.880264 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="604e32db-4501-4f89-995b-c500764ab2c0" containerName="ceilometer-notification-agent" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.880275 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="604e32db-4501-4f89-995b-c500764ab2c0" containerName="sg-core" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.881967 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.885757 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.889590 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 01:34:07 crc kubenswrapper[4665]: I1205 01:34:07.890093 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.034244 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " pod="openstack/ceilometer-0" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.035283 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " pod="openstack/ceilometer-0" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.036513 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-scripts\") pod \"ceilometer-0\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " pod="openstack/ceilometer-0" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.036585 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-config-data\") pod \"ceilometer-0\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " pod="openstack/ceilometer-0" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.036639 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-log-httpd\") pod \"ceilometer-0\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " pod="openstack/ceilometer-0" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.036675 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qqpj\" (UniqueName: \"kubernetes.io/projected/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-kube-api-access-6qqpj\") pod \"ceilometer-0\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " pod="openstack/ceilometer-0" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.036696 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-run-httpd\") pod \"ceilometer-0\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " pod="openstack/ceilometer-0" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.036827 4665 scope.go:117] "RemoveContainer" containerID="f8a17fee4900ea345eb58b62fb93e4fad5ee6ce2aa378454ab7ded28f94a515a" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.063257 4665 scope.go:117] "RemoveContainer" containerID="83252496c13bc65c37bb543e6425ef0bc3f0f1276093d717bcdf2dbddc0d244d" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 
01:34:08.111239 4665 scope.go:117] "RemoveContainer" containerID="c7a131ba233dca94c1d6e1dfb8c7d5fbf11af467fd8e901b4b759378d2c8283d" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.138874 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-scripts\") pod \"ceilometer-0\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " pod="openstack/ceilometer-0" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.138913 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-config-data\") pod \"ceilometer-0\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " pod="openstack/ceilometer-0" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.138939 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-log-httpd\") pod \"ceilometer-0\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " pod="openstack/ceilometer-0" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.138959 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qqpj\" (UniqueName: \"kubernetes.io/projected/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-kube-api-access-6qqpj\") pod \"ceilometer-0\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " pod="openstack/ceilometer-0" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.138977 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-run-httpd\") pod \"ceilometer-0\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " pod="openstack/ceilometer-0" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.139032 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " pod="openstack/ceilometer-0" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.139067 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " pod="openstack/ceilometer-0" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.140250 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-log-httpd\") pod \"ceilometer-0\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " pod="openstack/ceilometer-0" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.143695 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-run-httpd\") pod \"ceilometer-0\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " pod="openstack/ceilometer-0" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.146089 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-scripts\") pod \"ceilometer-0\" (UID: 
\"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " pod="openstack/ceilometer-0" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.149829 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-config-data\") pod \"ceilometer-0\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " pod="openstack/ceilometer-0" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.150726 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " pod="openstack/ceilometer-0" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.157380 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " pod="openstack/ceilometer-0" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.167032 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qqpj\" (UniqueName: \"kubernetes.io/projected/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-kube-api-access-6qqpj\") pod \"ceilometer-0\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " pod="openstack/ceilometer-0" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.219508 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.224405 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-cclzq" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.230921 4665 scope.go:117] "RemoveContainer" containerID="c81224db8ef2d3f839612b5de41c757fa0d26bf249e3e4b30cf320f71cce2b63" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.340837 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ff4178e-958a-443a-9dc6-dc354c11fa89-config-data\") pod \"4ff4178e-958a-443a-9dc6-dc354c11fa89\" (UID: \"4ff4178e-958a-443a-9dc6-dc354c11fa89\") " Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.340994 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m8tzk\" (UniqueName: \"kubernetes.io/projected/4ff4178e-958a-443a-9dc6-dc354c11fa89-kube-api-access-m8tzk\") pod \"4ff4178e-958a-443a-9dc6-dc354c11fa89\" (UID: \"4ff4178e-958a-443a-9dc6-dc354c11fa89\") " Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.341035 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ff4178e-958a-443a-9dc6-dc354c11fa89-combined-ca-bundle\") pod \"4ff4178e-958a-443a-9dc6-dc354c11fa89\" (UID: \"4ff4178e-958a-443a-9dc6-dc354c11fa89\") " Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.341068 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4ff4178e-958a-443a-9dc6-dc354c11fa89-scripts\") pod \"4ff4178e-958a-443a-9dc6-dc354c11fa89\" (UID: \"4ff4178e-958a-443a-9dc6-dc354c11fa89\") " Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.346633 4665 operation_generator.go:803] UnmountVolume.TearDown 
succeeded for volume "kubernetes.io/secret/4ff4178e-958a-443a-9dc6-dc354c11fa89-scripts" (OuterVolumeSpecName: "scripts") pod "4ff4178e-958a-443a-9dc6-dc354c11fa89" (UID: "4ff4178e-958a-443a-9dc6-dc354c11fa89"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.358511 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ff4178e-958a-443a-9dc6-dc354c11fa89-kube-api-access-m8tzk" (OuterVolumeSpecName: "kube-api-access-m8tzk") pod "4ff4178e-958a-443a-9dc6-dc354c11fa89" (UID: "4ff4178e-958a-443a-9dc6-dc354c11fa89"). InnerVolumeSpecName "kube-api-access-m8tzk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.377230 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ff4178e-958a-443a-9dc6-dc354c11fa89-config-data" (OuterVolumeSpecName: "config-data") pod "4ff4178e-958a-443a-9dc6-dc354c11fa89" (UID: "4ff4178e-958a-443a-9dc6-dc354c11fa89"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.388404 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4ff4178e-958a-443a-9dc6-dc354c11fa89-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4ff4178e-958a-443a-9dc6-dc354c11fa89" (UID: "4ff4178e-958a-443a-9dc6-dc354c11fa89"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.445489 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4ff4178e-958a-443a-9dc6-dc354c11fa89-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.445524 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m8tzk\" (UniqueName: \"kubernetes.io/projected/4ff4178e-958a-443a-9dc6-dc354c11fa89-kube-api-access-m8tzk\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.445533 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4ff4178e-958a-443a-9dc6-dc354c11fa89-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.445541 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4ff4178e-958a-443a-9dc6-dc354c11fa89-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.738945 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:34:08 crc kubenswrapper[4665]: W1205 01:34:08.742950 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5860fa6e_abda_4cc8_813b_c3a5a1e0c0a9.slice/crio-2ec06270bd44d582f2c6a4045ebd784283546c58b2a0a39be0d4f6a0d3752789 WatchSource:0}: Error finding container 2ec06270bd44d582f2c6a4045ebd784283546c58b2a0a39be0d4f6a0d3752789: Status 404 returned error can't find the container with id 2ec06270bd44d582f2c6a4045ebd784283546c58b2a0a39be0d4f6a0d3752789 Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.746453 4665 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider 
Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.818273 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-cclzq" event={"ID":"4ff4178e-958a-443a-9dc6-dc354c11fa89","Type":"ContainerDied","Data":"16b2aa4d49181119da8445d80f7946faf2f12afa3b7f8f7c9a3d5db00141e18e"} Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.819229 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="16b2aa4d49181119da8445d80f7946faf2f12afa3b7f8f7c9a3d5db00141e18e" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.819385 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-cclzq" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.828818 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9","Type":"ContainerStarted","Data":"2ec06270bd44d582f2c6a4045ebd784283546c58b2a0a39be0d4f6a0d3752789"} Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.904700 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="604e32db-4501-4f89-995b-c500764ab2c0" path="/var/lib/kubelet/pods/604e32db-4501-4f89-995b-c500764ab2c0/volumes" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.928276 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 01:34:08 crc kubenswrapper[4665]: E1205 01:34:08.928853 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ff4178e-958a-443a-9dc6-dc354c11fa89" containerName="nova-cell0-conductor-db-sync" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.928871 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ff4178e-958a-443a-9dc6-dc354c11fa89" containerName="nova-cell0-conductor-db-sync" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.929085 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ff4178e-958a-443a-9dc6-dc354c11fa89" containerName="nova-cell0-conductor-db-sync" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.929686 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.938141 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.938217 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 01:34:08 crc kubenswrapper[4665]: I1205 01:34:08.942318 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-frk4r" Dec 05 01:34:09 crc kubenswrapper[4665]: I1205 01:34:09.056623 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bvndj\" (UniqueName: \"kubernetes.io/projected/5803d957-267e-409c-914d-2b49c318aca6-kube-api-access-bvndj\") pod \"nova-cell0-conductor-0\" (UID: \"5803d957-267e-409c-914d-2b49c318aca6\") " pod="openstack/nova-cell0-conductor-0" Dec 05 01:34:09 crc kubenswrapper[4665]: I1205 01:34:09.056733 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5803d957-267e-409c-914d-2b49c318aca6-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"5803d957-267e-409c-914d-2b49c318aca6\") " pod="openstack/nova-cell0-conductor-0" Dec 05 01:34:09 crc kubenswrapper[4665]: I1205 01:34:09.056763 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5803d957-267e-409c-914d-2b49c318aca6-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"5803d957-267e-409c-914d-2b49c318aca6\") " pod="openstack/nova-cell0-conductor-0" Dec 05 01:34:09 crc kubenswrapper[4665]: I1205 01:34:09.158626 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5803d957-267e-409c-914d-2b49c318aca6-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"5803d957-267e-409c-914d-2b49c318aca6\") " pod="openstack/nova-cell0-conductor-0" Dec 05 01:34:09 crc kubenswrapper[4665]: I1205 01:34:09.158883 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5803d957-267e-409c-914d-2b49c318aca6-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"5803d957-267e-409c-914d-2b49c318aca6\") " pod="openstack/nova-cell0-conductor-0" Dec 05 01:34:09 crc kubenswrapper[4665]: I1205 01:34:09.158991 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bvndj\" (UniqueName: \"kubernetes.io/projected/5803d957-267e-409c-914d-2b49c318aca6-kube-api-access-bvndj\") pod \"nova-cell0-conductor-0\" (UID: \"5803d957-267e-409c-914d-2b49c318aca6\") " pod="openstack/nova-cell0-conductor-0" Dec 05 01:34:09 crc kubenswrapper[4665]: I1205 01:34:09.164096 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5803d957-267e-409c-914d-2b49c318aca6-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"5803d957-267e-409c-914d-2b49c318aca6\") " pod="openstack/nova-cell0-conductor-0" Dec 05 01:34:09 crc kubenswrapper[4665]: I1205 01:34:09.179595 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5803d957-267e-409c-914d-2b49c318aca6-config-data\") pod \"nova-cell0-conductor-0\" 
(UID: \"5803d957-267e-409c-914d-2b49c318aca6\") " pod="openstack/nova-cell0-conductor-0" Dec 05 01:34:09 crc kubenswrapper[4665]: I1205 01:34:09.180193 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bvndj\" (UniqueName: \"kubernetes.io/projected/5803d957-267e-409c-914d-2b49c318aca6-kube-api-access-bvndj\") pod \"nova-cell0-conductor-0\" (UID: \"5803d957-267e-409c-914d-2b49c318aca6\") " pod="openstack/nova-cell0-conductor-0" Dec 05 01:34:09 crc kubenswrapper[4665]: I1205 01:34:09.247918 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 01:34:09 crc kubenswrapper[4665]: I1205 01:34:09.714133 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 01:34:09 crc kubenswrapper[4665]: W1205 01:34:09.718539 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5803d957_267e_409c_914d_2b49c318aca6.slice/crio-e8db75c9d97f9256c4d7e12de901c759d155e9d7ece3f4561230841c42695189 WatchSource:0}: Error finding container e8db75c9d97f9256c4d7e12de901c759d155e9d7ece3f4561230841c42695189: Status 404 returned error can't find the container with id e8db75c9d97f9256c4d7e12de901c759d155e9d7ece3f4561230841c42695189 Dec 05 01:34:09 crc kubenswrapper[4665]: I1205 01:34:09.806648 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 01:34:09 crc kubenswrapper[4665]: I1205 01:34:09.840230 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9","Type":"ContainerStarted","Data":"d1bace5049587023c2ebcfecb3b0c6685f9a502765b8359adde09778ee54b29f"} Dec 05 01:34:09 crc kubenswrapper[4665]: I1205 01:34:09.842195 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"5803d957-267e-409c-914d-2b49c318aca6","Type":"ContainerStarted","Data":"e8db75c9d97f9256c4d7e12de901c759d155e9d7ece3f4561230841c42695189"} Dec 05 01:34:10 crc kubenswrapper[4665]: I1205 01:34:10.448248 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:34:10 crc kubenswrapper[4665]: I1205 01:34:10.853960 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"5803d957-267e-409c-914d-2b49c318aca6","Type":"ContainerStarted","Data":"a6f87fdd628665a05f239acf16f82df5b6181712b6260719e3be4633c4dcbfe3"} Dec 05 01:34:10 crc kubenswrapper[4665]: I1205 01:34:10.854022 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell0-conductor-0" podUID="5803d957-267e-409c-914d-2b49c318aca6" containerName="nova-cell0-conductor-conductor" containerID="cri-o://a6f87fdd628665a05f239acf16f82df5b6181712b6260719e3be4633c4dcbfe3" gracePeriod=30 Dec 05 01:34:10 crc kubenswrapper[4665]: I1205 01:34:10.854269 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 05 01:34:10 crc kubenswrapper[4665]: I1205 01:34:10.857871 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9","Type":"ContainerStarted","Data":"a636e106d26847c2a149633e4a8f0fe17be12376a563f0be0787bfb7d1a33786"} Dec 05 01:34:10 crc kubenswrapper[4665]: I1205 01:34:10.877937 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.877915219 podStartE2EDuration="2.877915219s" podCreationTimestamp="2025-12-05 01:34:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:34:10.874183169 +0000 UTC m=+1426.213575468" watchObservedRunningTime="2025-12-05 01:34:10.877915219 +0000 UTC m=+1426.217307518" Dec 05 01:34:11 crc kubenswrapper[4665]: I1205 01:34:11.867411 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9","Type":"ContainerStarted","Data":"94bff39de4df1ec50162342c20f42091693b1bcf2b22195ecf5504f80c582fe9"} Dec 05 01:34:12 crc kubenswrapper[4665]: I1205 01:34:12.876750 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9","Type":"ContainerStarted","Data":"4d43f9acfe9cf1aeac8a4bf4b5be0105fec0c52402c77946894b83c3453cfda0"} Dec 05 01:34:12 crc kubenswrapper[4665]: I1205 01:34:12.877304 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 01:34:12 crc kubenswrapper[4665]: I1205 01:34:12.876930 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" containerName="proxy-httpd" containerID="cri-o://4d43f9acfe9cf1aeac8a4bf4b5be0105fec0c52402c77946894b83c3453cfda0" gracePeriod=30 Dec 05 01:34:12 crc kubenswrapper[4665]: I1205 01:34:12.876932 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" containerName="sg-core" containerID="cri-o://94bff39de4df1ec50162342c20f42091693b1bcf2b22195ecf5504f80c582fe9" gracePeriod=30 Dec 05 01:34:12 crc kubenswrapper[4665]: I1205 01:34:12.877031 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" containerName="ceilometer-notification-agent" containerID="cri-o://a636e106d26847c2a149633e4a8f0fe17be12376a563f0be0787bfb7d1a33786" gracePeriod=30 Dec 05 01:34:12 crc kubenswrapper[4665]: I1205 01:34:12.876883 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" containerName="ceilometer-central-agent" containerID="cri-o://d1bace5049587023c2ebcfecb3b0c6685f9a502765b8359adde09778ee54b29f" gracePeriod=30 Dec 05 01:34:12 crc kubenswrapper[4665]: I1205 01:34:12.915408 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.744097867 podStartE2EDuration="5.91538372s" podCreationTimestamp="2025-12-05 01:34:07 +0000 UTC" firstStartedPulling="2025-12-05 01:34:08.746057063 +0000 UTC m=+1424.085449402" lastFinishedPulling="2025-12-05 01:34:11.917342966 +0000 UTC m=+1427.256735255" observedRunningTime="2025-12-05 01:34:12.909232562 +0000 UTC m=+1428.248624861" watchObservedRunningTime="2025-12-05 01:34:12.91538372 +0000 UTC m=+1428.254776029" Dec 05 01:34:13 crc kubenswrapper[4665]: I1205 01:34:13.891605 4665 generic.go:334] "Generic (PLEG): container finished" podID="5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" containerID="4d43f9acfe9cf1aeac8a4bf4b5be0105fec0c52402c77946894b83c3453cfda0" exitCode=0 Dec 05 01:34:13 crc kubenswrapper[4665]: I1205 01:34:13.891677 4665 generic.go:334] "Generic (PLEG): 
container finished" podID="5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" containerID="94bff39de4df1ec50162342c20f42091693b1bcf2b22195ecf5504f80c582fe9" exitCode=2 Dec 05 01:34:13 crc kubenswrapper[4665]: I1205 01:34:13.891691 4665 generic.go:334] "Generic (PLEG): container finished" podID="5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" containerID="a636e106d26847c2a149633e4a8f0fe17be12376a563f0be0787bfb7d1a33786" exitCode=0 Dec 05 01:34:13 crc kubenswrapper[4665]: I1205 01:34:13.891633 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9","Type":"ContainerDied","Data":"4d43f9acfe9cf1aeac8a4bf4b5be0105fec0c52402c77946894b83c3453cfda0"} Dec 05 01:34:13 crc kubenswrapper[4665]: I1205 01:34:13.891734 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9","Type":"ContainerDied","Data":"94bff39de4df1ec50162342c20f42091693b1bcf2b22195ecf5504f80c582fe9"} Dec 05 01:34:13 crc kubenswrapper[4665]: I1205 01:34:13.891757 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9","Type":"ContainerDied","Data":"a636e106d26847c2a149633e4a8f0fe17be12376a563f0be0787bfb7d1a33786"} Dec 05 01:34:14 crc kubenswrapper[4665]: E1205 01:34:14.250463 4665 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6f87fdd628665a05f239acf16f82df5b6181712b6260719e3be4633c4dcbfe3" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 01:34:14 crc kubenswrapper[4665]: E1205 01:34:14.251713 4665 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6f87fdd628665a05f239acf16f82df5b6181712b6260719e3be4633c4dcbfe3" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 01:34:14 crc kubenswrapper[4665]: E1205 01:34:14.252942 4665 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6f87fdd628665a05f239acf16f82df5b6181712b6260719e3be4633c4dcbfe3" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 01:34:14 crc kubenswrapper[4665]: E1205 01:34:14.253006 4665 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="5803d957-267e-409c-914d-2b49c318aca6" containerName="nova-cell0-conductor-conductor" Dec 05 01:34:14 crc kubenswrapper[4665]: I1205 01:34:14.922777 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:34:14 crc kubenswrapper[4665]: I1205 01:34:14.922849 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.104360 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.513895 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.598638 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-scripts\") pod \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.598734 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-combined-ca-bundle\") pod \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.598786 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-log-httpd\") pod \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.598819 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-run-httpd\") pod \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.599021 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-sg-core-conf-yaml\") pod \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.599115 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-config-data\") pod \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.599153 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qqpj\" (UniqueName: \"kubernetes.io/projected/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-kube-api-access-6qqpj\") pod \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\" (UID: \"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9\") " Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.600546 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" (UID: "5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.600855 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" (UID: "5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.605183 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-kube-api-access-6qqpj" (OuterVolumeSpecName: "kube-api-access-6qqpj") pod "5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" (UID: "5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9"). InnerVolumeSpecName "kube-api-access-6qqpj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.607673 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-scripts" (OuterVolumeSpecName: "scripts") pod "5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" (UID: "5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.632763 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" (UID: "5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.671840 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" (UID: "5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.694328 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-config-data" (OuterVolumeSpecName: "config-data") pod "5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" (UID: "5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.701730 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qqpj\" (UniqueName: \"kubernetes.io/projected/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-kube-api-access-6qqpj\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.702025 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.702086 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.702159 4665 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.702214 4665 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.702274 4665 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.702469 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.920494 4665 generic.go:334] "Generic (PLEG): container finished" podID="5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" containerID="d1bace5049587023c2ebcfecb3b0c6685f9a502765b8359adde09778ee54b29f" exitCode=0 Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.920525 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9","Type":"ContainerDied","Data":"d1bace5049587023c2ebcfecb3b0c6685f9a502765b8359adde09778ee54b29f"} Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.920699 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9","Type":"ContainerDied","Data":"2ec06270bd44d582f2c6a4045ebd784283546c58b2a0a39be0d4f6a0d3752789"} Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.920759 4665 scope.go:117] "RemoveContainer" containerID="4d43f9acfe9cf1aeac8a4bf4b5be0105fec0c52402c77946894b83c3453cfda0" Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.920567 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.957233 4665 scope.go:117] "RemoveContainer" containerID="94bff39de4df1ec50162342c20f42091693b1bcf2b22195ecf5504f80c582fe9" Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.968492 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:34:16 crc kubenswrapper[4665]: I1205 01:34:16.994503 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.011531 4665 scope.go:117] "RemoveContainer" containerID="a636e106d26847c2a149633e4a8f0fe17be12376a563f0be0787bfb7d1a33786" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.011696 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:34:17 crc kubenswrapper[4665]: E1205 01:34:17.012151 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" containerName="ceilometer-central-agent" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.012166 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" containerName="ceilometer-central-agent" Dec 05 01:34:17 crc kubenswrapper[4665]: E1205 01:34:17.012179 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" containerName="ceilometer-notification-agent" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.012187 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" containerName="ceilometer-notification-agent" Dec 05 01:34:17 crc kubenswrapper[4665]: E1205 01:34:17.012216 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" containerName="sg-core" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.012224 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" containerName="sg-core" Dec 05 01:34:17 crc kubenswrapper[4665]: E1205 01:34:17.012235 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" containerName="proxy-httpd" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.012242 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" containerName="proxy-httpd" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.012470 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" containerName="ceilometer-notification-agent" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.012487 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" containerName="sg-core" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.012498 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" containerName="proxy-httpd" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.012525 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" containerName="ceilometer-central-agent" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.014581 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.018826 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.021529 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.027925 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.064010 4665 scope.go:117] "RemoveContainer" containerID="d1bace5049587023c2ebcfecb3b0c6685f9a502765b8359adde09778ee54b29f" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.084718 4665 scope.go:117] "RemoveContainer" containerID="4d43f9acfe9cf1aeac8a4bf4b5be0105fec0c52402c77946894b83c3453cfda0" Dec 05 01:34:17 crc kubenswrapper[4665]: E1205 01:34:17.086158 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4d43f9acfe9cf1aeac8a4bf4b5be0105fec0c52402c77946894b83c3453cfda0\": container with ID starting with 4d43f9acfe9cf1aeac8a4bf4b5be0105fec0c52402c77946894b83c3453cfda0 not found: ID does not exist" containerID="4d43f9acfe9cf1aeac8a4bf4b5be0105fec0c52402c77946894b83c3453cfda0" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.086272 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4d43f9acfe9cf1aeac8a4bf4b5be0105fec0c52402c77946894b83c3453cfda0"} err="failed to get container status \"4d43f9acfe9cf1aeac8a4bf4b5be0105fec0c52402c77946894b83c3453cfda0\": rpc error: code = NotFound desc = could not find container \"4d43f9acfe9cf1aeac8a4bf4b5be0105fec0c52402c77946894b83c3453cfda0\": container with ID starting with 4d43f9acfe9cf1aeac8a4bf4b5be0105fec0c52402c77946894b83c3453cfda0 not found: ID does not exist" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.086396 4665 scope.go:117] "RemoveContainer" containerID="94bff39de4df1ec50162342c20f42091693b1bcf2b22195ecf5504f80c582fe9" Dec 05 01:34:17 crc kubenswrapper[4665]: E1205 01:34:17.086743 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94bff39de4df1ec50162342c20f42091693b1bcf2b22195ecf5504f80c582fe9\": container with ID starting with 94bff39de4df1ec50162342c20f42091693b1bcf2b22195ecf5504f80c582fe9 not found: ID does not exist" containerID="94bff39de4df1ec50162342c20f42091693b1bcf2b22195ecf5504f80c582fe9" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.086766 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94bff39de4df1ec50162342c20f42091693b1bcf2b22195ecf5504f80c582fe9"} err="failed to get container status \"94bff39de4df1ec50162342c20f42091693b1bcf2b22195ecf5504f80c582fe9\": rpc error: code = NotFound desc = could not find container \"94bff39de4df1ec50162342c20f42091693b1bcf2b22195ecf5504f80c582fe9\": container with ID starting with 94bff39de4df1ec50162342c20f42091693b1bcf2b22195ecf5504f80c582fe9 not found: ID does not exist" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.086783 4665 scope.go:117] "RemoveContainer" containerID="a636e106d26847c2a149633e4a8f0fe17be12376a563f0be0787bfb7d1a33786" Dec 05 01:34:17 crc kubenswrapper[4665]: E1205 01:34:17.087029 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"a636e106d26847c2a149633e4a8f0fe17be12376a563f0be0787bfb7d1a33786\": container with ID starting with a636e106d26847c2a149633e4a8f0fe17be12376a563f0be0787bfb7d1a33786 not found: ID does not exist" containerID="a636e106d26847c2a149633e4a8f0fe17be12376a563f0be0787bfb7d1a33786" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.087119 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a636e106d26847c2a149633e4a8f0fe17be12376a563f0be0787bfb7d1a33786"} err="failed to get container status \"a636e106d26847c2a149633e4a8f0fe17be12376a563f0be0787bfb7d1a33786\": rpc error: code = NotFound desc = could not find container \"a636e106d26847c2a149633e4a8f0fe17be12376a563f0be0787bfb7d1a33786\": container with ID starting with a636e106d26847c2a149633e4a8f0fe17be12376a563f0be0787bfb7d1a33786 not found: ID does not exist" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.087222 4665 scope.go:117] "RemoveContainer" containerID="d1bace5049587023c2ebcfecb3b0c6685f9a502765b8359adde09778ee54b29f" Dec 05 01:34:17 crc kubenswrapper[4665]: E1205 01:34:17.087530 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1bace5049587023c2ebcfecb3b0c6685f9a502765b8359adde09778ee54b29f\": container with ID starting with d1bace5049587023c2ebcfecb3b0c6685f9a502765b8359adde09778ee54b29f not found: ID does not exist" containerID="d1bace5049587023c2ebcfecb3b0c6685f9a502765b8359adde09778ee54b29f" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.087556 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1bace5049587023c2ebcfecb3b0c6685f9a502765b8359adde09778ee54b29f"} err="failed to get container status \"d1bace5049587023c2ebcfecb3b0c6685f9a502765b8359adde09778ee54b29f\": rpc error: code = NotFound desc = could not find container \"d1bace5049587023c2ebcfecb3b0c6685f9a502765b8359adde09778ee54b29f\": container with ID starting with d1bace5049587023c2ebcfecb3b0c6685f9a502765b8359adde09778ee54b29f not found: ID does not exist" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.132431 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bb4c486-528d-4144-8fc6-e6940686a4dd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " pod="openstack/ceilometer-0" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.133346 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8bb4c486-528d-4144-8fc6-e6940686a4dd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " pod="openstack/ceilometer-0" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.133496 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8bb4c486-528d-4144-8fc6-e6940686a4dd-run-httpd\") pod \"ceilometer-0\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " pod="openstack/ceilometer-0" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.133713 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bb4c486-528d-4144-8fc6-e6940686a4dd-config-data\") pod \"ceilometer-0\" (UID: 
\"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " pod="openstack/ceilometer-0" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.133949 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8bb4c486-528d-4144-8fc6-e6940686a4dd-scripts\") pod \"ceilometer-0\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " pod="openstack/ceilometer-0" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.134092 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8bb4c486-528d-4144-8fc6-e6940686a4dd-log-httpd\") pod \"ceilometer-0\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " pod="openstack/ceilometer-0" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.134286 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xmq7t\" (UniqueName: \"kubernetes.io/projected/8bb4c486-528d-4144-8fc6-e6940686a4dd-kube-api-access-xmq7t\") pod \"ceilometer-0\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " pod="openstack/ceilometer-0" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.235566 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8bb4c486-528d-4144-8fc6-e6940686a4dd-scripts\") pod \"ceilometer-0\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " pod="openstack/ceilometer-0" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.235609 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8bb4c486-528d-4144-8fc6-e6940686a4dd-log-httpd\") pod \"ceilometer-0\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " pod="openstack/ceilometer-0" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.235667 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xmq7t\" (UniqueName: \"kubernetes.io/projected/8bb4c486-528d-4144-8fc6-e6940686a4dd-kube-api-access-xmq7t\") pod \"ceilometer-0\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " pod="openstack/ceilometer-0" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.235742 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bb4c486-528d-4144-8fc6-e6940686a4dd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " pod="openstack/ceilometer-0" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.235778 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8bb4c486-528d-4144-8fc6-e6940686a4dd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " pod="openstack/ceilometer-0" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.235811 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8bb4c486-528d-4144-8fc6-e6940686a4dd-run-httpd\") pod \"ceilometer-0\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " pod="openstack/ceilometer-0" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.235862 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/8bb4c486-528d-4144-8fc6-e6940686a4dd-config-data\") pod \"ceilometer-0\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " pod="openstack/ceilometer-0" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.241566 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8bb4c486-528d-4144-8fc6-e6940686a4dd-run-httpd\") pod \"ceilometer-0\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " pod="openstack/ceilometer-0" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.243091 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8bb4c486-528d-4144-8fc6-e6940686a4dd-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " pod="openstack/ceilometer-0" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.244158 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8bb4c486-528d-4144-8fc6-e6940686a4dd-log-httpd\") pod \"ceilometer-0\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " pod="openstack/ceilometer-0" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.244318 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bb4c486-528d-4144-8fc6-e6940686a4dd-config-data\") pod \"ceilometer-0\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " pod="openstack/ceilometer-0" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.252863 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bb4c486-528d-4144-8fc6-e6940686a4dd-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " pod="openstack/ceilometer-0" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.254906 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8bb4c486-528d-4144-8fc6-e6940686a4dd-scripts\") pod \"ceilometer-0\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " pod="openstack/ceilometer-0" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.259409 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xmq7t\" (UniqueName: \"kubernetes.io/projected/8bb4c486-528d-4144-8fc6-e6940686a4dd-kube-api-access-xmq7t\") pod \"ceilometer-0\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " pod="openstack/ceilometer-0" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.353598 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.819220 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:34:17 crc kubenswrapper[4665]: I1205 01:34:17.932688 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8bb4c486-528d-4144-8fc6-e6940686a4dd","Type":"ContainerStarted","Data":"70f8d2fe0cf5ede57d3f20855ee25c36b448a0cfc3fb384d6d938be67ae3d78b"} Dec 05 01:34:18 crc kubenswrapper[4665]: I1205 01:34:18.907887 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9" path="/var/lib/kubelet/pods/5860fa6e-abda-4cc8-813b-c3a5a1e0c0a9/volumes" Dec 05 01:34:18 crc kubenswrapper[4665]: I1205 01:34:18.940645 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8bb4c486-528d-4144-8fc6-e6940686a4dd","Type":"ContainerStarted","Data":"68b3f1b595238750bef7378e7a24b55f4b0d9775f1c9dcb135f969f2a9742b7f"} Dec 05 01:34:19 crc kubenswrapper[4665]: E1205 01:34:19.252768 4665 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6f87fdd628665a05f239acf16f82df5b6181712b6260719e3be4633c4dcbfe3" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 01:34:19 crc kubenswrapper[4665]: E1205 01:34:19.254728 4665 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6f87fdd628665a05f239acf16f82df5b6181712b6260719e3be4633c4dcbfe3" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 01:34:19 crc kubenswrapper[4665]: E1205 01:34:19.256348 4665 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6f87fdd628665a05f239acf16f82df5b6181712b6260719e3be4633c4dcbfe3" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 01:34:19 crc kubenswrapper[4665]: E1205 01:34:19.256395 4665 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="5803d957-267e-409c-914d-2b49c318aca6" containerName="nova-cell0-conductor-conductor" Dec 05 01:34:19 crc kubenswrapper[4665]: I1205 01:34:19.953155 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8bb4c486-528d-4144-8fc6-e6940686a4dd","Type":"ContainerStarted","Data":"b2327f3a3d40cfabb4fd1bc803fff798ab806f62cd65d49bd02dbde35957ddb4"} Dec 05 01:34:19 crc kubenswrapper[4665]: I1205 01:34:19.953473 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8bb4c486-528d-4144-8fc6-e6940686a4dd","Type":"ContainerStarted","Data":"ef80a408e3dfb869f731e9f4ebe7102ec86958a834b7d54791cb09eecf8d7bb7"} Dec 05 01:34:20 crc kubenswrapper[4665]: I1205 01:34:20.968185 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8bb4c486-528d-4144-8fc6-e6940686a4dd","Type":"ContainerStarted","Data":"1741b529044af0c45d897c87c113fa912e971468a86a5d47b4294c6ae87e9a53"} Dec 05 01:34:20 crc 
Dec 05 01:34:20 crc kubenswrapper[4665]: I1205 01:34:20.992422 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.162802633 podStartE2EDuration="4.992406247s" podCreationTimestamp="2025-12-05 01:34:16 +0000 UTC" firstStartedPulling="2025-12-05 01:34:17.817261063 +0000 UTC m=+1433.156653372" lastFinishedPulling="2025-12-05 01:34:20.646864687 +0000 UTC m=+1435.986256986" observedRunningTime="2025-12-05 01:34:20.991412583 +0000 UTC m=+1436.330804932" watchObservedRunningTime="2025-12-05 01:34:20.992406247 +0000 UTC m=+1436.331798546"
Dec 05 01:34:24 crc kubenswrapper[4665]: E1205 01:34:24.252659 4665 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6f87fdd628665a05f239acf16f82df5b6181712b6260719e3be4633c4dcbfe3" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 05 01:34:24 crc kubenswrapper[4665]: E1205 01:34:24.255572 4665 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6f87fdd628665a05f239acf16f82df5b6181712b6260719e3be4633c4dcbfe3" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 05 01:34:24 crc kubenswrapper[4665]: E1205 01:34:24.257064 4665 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6f87fdd628665a05f239acf16f82df5b6181712b6260719e3be4633c4dcbfe3" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 05 01:34:24 crc kubenswrapper[4665]: E1205 01:34:24.257121 4665 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="5803d957-267e-409c-914d-2b49c318aca6" containerName="nova-cell0-conductor-conductor"
Dec 05 01:34:29 crc kubenswrapper[4665]: E1205 01:34:29.250580 4665 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6f87fdd628665a05f239acf16f82df5b6181712b6260719e3be4633c4dcbfe3" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 05 01:34:29 crc kubenswrapper[4665]: E1205 01:34:29.252251 4665 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6f87fdd628665a05f239acf16f82df5b6181712b6260719e3be4633c4dcbfe3" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 05 01:34:29 crc kubenswrapper[4665]: E1205 01:34:29.254278 4665 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6f87fdd628665a05f239acf16f82df5b6181712b6260719e3be4633c4dcbfe3" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 05 01:34:29 crc kubenswrapper[4665]: E1205 01:34:29.254323 4665 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="5803d957-267e-409c-914d-2b49c318aca6" containerName="nova-cell0-conductor-conductor"
Dec 05 01:34:34 crc kubenswrapper[4665]: E1205 01:34:34.250738 4665 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6f87fdd628665a05f239acf16f82df5b6181712b6260719e3be4633c4dcbfe3" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 05 01:34:34 crc kubenswrapper[4665]: E1205 01:34:34.252409 4665 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6f87fdd628665a05f239acf16f82df5b6181712b6260719e3be4633c4dcbfe3" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 05 01:34:34 crc kubenswrapper[4665]: E1205 01:34:34.253790 4665 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6f87fdd628665a05f239acf16f82df5b6181712b6260719e3be4633c4dcbfe3" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"]
Dec 05 01:34:34 crc kubenswrapper[4665]: E1205 01:34:34.253832 4665 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="5803d957-267e-409c-914d-2b49c318aca6" containerName="nova-cell0-conductor-conductor"
Dec 05 01:34:37 crc kubenswrapper[4665]: I1205 01:34:37.872177 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-86cd4c9876-glfvx"
Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.028441 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2178a916-adc5-4ff5-8972-30b105320f5f-logs\") pod \"2178a916-adc5-4ff5-8972-30b105320f5f\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") "
Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.028889 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2178a916-adc5-4ff5-8972-30b105320f5f-logs" (OuterVolumeSpecName: "logs") pod "2178a916-adc5-4ff5-8972-30b105320f5f" (UID: "2178a916-adc5-4ff5-8972-30b105320f5f"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.028538 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2178a916-adc5-4ff5-8972-30b105320f5f-scripts\") pod \"2178a916-adc5-4ff5-8972-30b105320f5f\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.029037 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/2178a916-adc5-4ff5-8972-30b105320f5f-horizon-tls-certs\") pod \"2178a916-adc5-4ff5-8972-30b105320f5f\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.029084 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2178a916-adc5-4ff5-8972-30b105320f5f-config-data\") pod \"2178a916-adc5-4ff5-8972-30b105320f5f\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.029170 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2178a916-adc5-4ff5-8972-30b105320f5f-combined-ca-bundle\") pod \"2178a916-adc5-4ff5-8972-30b105320f5f\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.029206 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2178a916-adc5-4ff5-8972-30b105320f5f-horizon-secret-key\") pod \"2178a916-adc5-4ff5-8972-30b105320f5f\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.029240 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gklsm\" (UniqueName: \"kubernetes.io/projected/2178a916-adc5-4ff5-8972-30b105320f5f-kube-api-access-gklsm\") pod \"2178a916-adc5-4ff5-8972-30b105320f5f\" (UID: \"2178a916-adc5-4ff5-8972-30b105320f5f\") " Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.029693 4665 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/2178a916-adc5-4ff5-8972-30b105320f5f-logs\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.037823 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2178a916-adc5-4ff5-8972-30b105320f5f-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "2178a916-adc5-4ff5-8972-30b105320f5f" (UID: "2178a916-adc5-4ff5-8972-30b105320f5f"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.037911 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2178a916-adc5-4ff5-8972-30b105320f5f-kube-api-access-gklsm" (OuterVolumeSpecName: "kube-api-access-gklsm") pod "2178a916-adc5-4ff5-8972-30b105320f5f" (UID: "2178a916-adc5-4ff5-8972-30b105320f5f"). InnerVolumeSpecName "kube-api-access-gklsm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.053714 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2178a916-adc5-4ff5-8972-30b105320f5f-config-data" (OuterVolumeSpecName: "config-data") pod "2178a916-adc5-4ff5-8972-30b105320f5f" (UID: "2178a916-adc5-4ff5-8972-30b105320f5f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.056877 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2178a916-adc5-4ff5-8972-30b105320f5f-scripts" (OuterVolumeSpecName: "scripts") pod "2178a916-adc5-4ff5-8972-30b105320f5f" (UID: "2178a916-adc5-4ff5-8972-30b105320f5f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.057510 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2178a916-adc5-4ff5-8972-30b105320f5f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2178a916-adc5-4ff5-8972-30b105320f5f" (UID: "2178a916-adc5-4ff5-8972-30b105320f5f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.085070 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2178a916-adc5-4ff5-8972-30b105320f5f-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "2178a916-adc5-4ff5-8972-30b105320f5f" (UID: "2178a916-adc5-4ff5-8972-30b105320f5f"). InnerVolumeSpecName "horizon-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.131512 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2178a916-adc5-4ff5-8972-30b105320f5f-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.131543 4665 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/2178a916-adc5-4ff5-8972-30b105320f5f-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.131554 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2178a916-adc5-4ff5-8972-30b105320f5f-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.131563 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2178a916-adc5-4ff5-8972-30b105320f5f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.131572 4665 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/2178a916-adc5-4ff5-8972-30b105320f5f-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.131580 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gklsm\" (UniqueName: \"kubernetes.io/projected/2178a916-adc5-4ff5-8972-30b105320f5f-kube-api-access-gklsm\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.145278 4665 generic.go:334] "Generic (PLEG): container finished" 
podID="2178a916-adc5-4ff5-8972-30b105320f5f" containerID="0668f57d4a1180cd02471099a7d09021483214f428c5cf14964b853a82613c31" exitCode=137 Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.145340 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-86cd4c9876-glfvx" event={"ID":"2178a916-adc5-4ff5-8972-30b105320f5f","Type":"ContainerDied","Data":"0668f57d4a1180cd02471099a7d09021483214f428c5cf14964b853a82613c31"} Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.145366 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-86cd4c9876-glfvx" event={"ID":"2178a916-adc5-4ff5-8972-30b105320f5f","Type":"ContainerDied","Data":"444032a90bc43a2efb352707374c096c88a6558c631b3ca70aff18e390775da3"} Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.145383 4665 scope.go:117] "RemoveContainer" containerID="dffd3d080b4583df799e6005bb5d553f12161985481db6c5307b59ee304b0ada" Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.145507 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-86cd4c9876-glfvx" Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.182753 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-86cd4c9876-glfvx"] Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.192364 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-86cd4c9876-glfvx"] Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.309183 4665 scope.go:117] "RemoveContainer" containerID="0668f57d4a1180cd02471099a7d09021483214f428c5cf14964b853a82613c31" Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.331618 4665 scope.go:117] "RemoveContainer" containerID="dffd3d080b4583df799e6005bb5d553f12161985481db6c5307b59ee304b0ada" Dec 05 01:34:38 crc kubenswrapper[4665]: E1205 01:34:38.331977 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dffd3d080b4583df799e6005bb5d553f12161985481db6c5307b59ee304b0ada\": container with ID starting with dffd3d080b4583df799e6005bb5d553f12161985481db6c5307b59ee304b0ada not found: ID does not exist" containerID="dffd3d080b4583df799e6005bb5d553f12161985481db6c5307b59ee304b0ada" Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.332014 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dffd3d080b4583df799e6005bb5d553f12161985481db6c5307b59ee304b0ada"} err="failed to get container status \"dffd3d080b4583df799e6005bb5d553f12161985481db6c5307b59ee304b0ada\": rpc error: code = NotFound desc = could not find container \"dffd3d080b4583df799e6005bb5d553f12161985481db6c5307b59ee304b0ada\": container with ID starting with dffd3d080b4583df799e6005bb5d553f12161985481db6c5307b59ee304b0ada not found: ID does not exist" Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.332060 4665 scope.go:117] "RemoveContainer" containerID="0668f57d4a1180cd02471099a7d09021483214f428c5cf14964b853a82613c31" Dec 05 01:34:38 crc kubenswrapper[4665]: E1205 01:34:38.332286 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0668f57d4a1180cd02471099a7d09021483214f428c5cf14964b853a82613c31\": container with ID starting with 0668f57d4a1180cd02471099a7d09021483214f428c5cf14964b853a82613c31 not found: ID does not exist" containerID="0668f57d4a1180cd02471099a7d09021483214f428c5cf14964b853a82613c31" Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 
01:34:38.332336 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0668f57d4a1180cd02471099a7d09021483214f428c5cf14964b853a82613c31"} err="failed to get container status \"0668f57d4a1180cd02471099a7d09021483214f428c5cf14964b853a82613c31\": rpc error: code = NotFound desc = could not find container \"0668f57d4a1180cd02471099a7d09021483214f428c5cf14964b853a82613c31\": container with ID starting with 0668f57d4a1180cd02471099a7d09021483214f428c5cf14964b853a82613c31 not found: ID does not exist" Dec 05 01:34:38 crc kubenswrapper[4665]: I1205 01:34:38.905467 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2178a916-adc5-4ff5-8972-30b105320f5f" path="/var/lib/kubelet/pods/2178a916-adc5-4ff5-8972-30b105320f5f/volumes" Dec 05 01:34:39 crc kubenswrapper[4665]: E1205 01:34:39.250515 4665 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6f87fdd628665a05f239acf16f82df5b6181712b6260719e3be4633c4dcbfe3" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 01:34:39 crc kubenswrapper[4665]: E1205 01:34:39.252323 4665 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6f87fdd628665a05f239acf16f82df5b6181712b6260719e3be4633c4dcbfe3" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 01:34:39 crc kubenswrapper[4665]: E1205 01:34:39.253433 4665 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="a6f87fdd628665a05f239acf16f82df5b6181712b6260719e3be4633c4dcbfe3" cmd=["/usr/bin/pgrep","-r","DRST","nova-conductor"] Dec 05 01:34:39 crc kubenswrapper[4665]: E1205 01:34:39.253504 4665 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-cell0-conductor-0" podUID="5803d957-267e-409c-914d-2b49c318aca6" containerName="nova-cell0-conductor-conductor" Dec 05 01:34:41 crc kubenswrapper[4665]: I1205 01:34:41.176663 4665 generic.go:334] "Generic (PLEG): container finished" podID="5803d957-267e-409c-914d-2b49c318aca6" containerID="a6f87fdd628665a05f239acf16f82df5b6181712b6260719e3be4633c4dcbfe3" exitCode=137 Dec 05 01:34:41 crc kubenswrapper[4665]: I1205 01:34:41.176995 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"5803d957-267e-409c-914d-2b49c318aca6","Type":"ContainerDied","Data":"a6f87fdd628665a05f239acf16f82df5b6181712b6260719e3be4633c4dcbfe3"} Dec 05 01:34:41 crc kubenswrapper[4665]: I1205 01:34:41.177213 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"5803d957-267e-409c-914d-2b49c318aca6","Type":"ContainerDied","Data":"e8db75c9d97f9256c4d7e12de901c759d155e9d7ece3f4561230841c42695189"} Dec 05 01:34:41 crc kubenswrapper[4665]: I1205 01:34:41.177246 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e8db75c9d97f9256c4d7e12de901c759d155e9d7ece3f4561230841c42695189" Dec 05 01:34:41 crc kubenswrapper[4665]: I1205 01:34:41.236817 4665 util.go:48] "No ready sandbox for 
Dec 05 01:34:41 crc kubenswrapper[4665]: I1205 01:34:41.386429 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bvndj\" (UniqueName: \"kubernetes.io/projected/5803d957-267e-409c-914d-2b49c318aca6-kube-api-access-bvndj\") pod \"5803d957-267e-409c-914d-2b49c318aca6\" (UID: \"5803d957-267e-409c-914d-2b49c318aca6\") "
Dec 05 01:34:41 crc kubenswrapper[4665]: I1205 01:34:41.387036 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5803d957-267e-409c-914d-2b49c318aca6-config-data\") pod \"5803d957-267e-409c-914d-2b49c318aca6\" (UID: \"5803d957-267e-409c-914d-2b49c318aca6\") "
Dec 05 01:34:41 crc kubenswrapper[4665]: I1205 01:34:41.387158 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5803d957-267e-409c-914d-2b49c318aca6-combined-ca-bundle\") pod \"5803d957-267e-409c-914d-2b49c318aca6\" (UID: \"5803d957-267e-409c-914d-2b49c318aca6\") "
Dec 05 01:34:41 crc kubenswrapper[4665]: I1205 01:34:41.396984 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5803d957-267e-409c-914d-2b49c318aca6-kube-api-access-bvndj" (OuterVolumeSpecName: "kube-api-access-bvndj") pod "5803d957-267e-409c-914d-2b49c318aca6" (UID: "5803d957-267e-409c-914d-2b49c318aca6"). InnerVolumeSpecName "kube-api-access-bvndj". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:34:41 crc kubenswrapper[4665]: I1205 01:34:41.410447 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5803d957-267e-409c-914d-2b49c318aca6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5803d957-267e-409c-914d-2b49c318aca6" (UID: "5803d957-267e-409c-914d-2b49c318aca6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:34:41 crc kubenswrapper[4665]: I1205 01:34:41.411392 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5803d957-267e-409c-914d-2b49c318aca6-config-data" (OuterVolumeSpecName: "config-data") pod "5803d957-267e-409c-914d-2b49c318aca6" (UID: "5803d957-267e-409c-914d-2b49c318aca6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:34:41 crc kubenswrapper[4665]: I1205 01:34:41.489904 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bvndj\" (UniqueName: \"kubernetes.io/projected/5803d957-267e-409c-914d-2b49c318aca6-kube-api-access-bvndj\") on node \"crc\" DevicePath \"\""
Dec 05 01:34:41 crc kubenswrapper[4665]: I1205 01:34:41.489933 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5803d957-267e-409c-914d-2b49c318aca6-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 01:34:41 crc kubenswrapper[4665]: I1205 01:34:41.489942 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5803d957-267e-409c-914d-2b49c318aca6-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.187181 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.236500 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.245885 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.262003 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"]
Dec 05 01:34:42 crc kubenswrapper[4665]: E1205 01:34:42.262363 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2178a916-adc5-4ff5-8972-30b105320f5f" containerName="horizon"
Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.262380 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="2178a916-adc5-4ff5-8972-30b105320f5f" containerName="horizon"
Dec 05 01:34:42 crc kubenswrapper[4665]: E1205 01:34:42.262390 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2178a916-adc5-4ff5-8972-30b105320f5f" containerName="horizon-log"
Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.262396 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="2178a916-adc5-4ff5-8972-30b105320f5f" containerName="horizon-log"
Dec 05 01:34:42 crc kubenswrapper[4665]: E1205 01:34:42.262415 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2178a916-adc5-4ff5-8972-30b105320f5f" containerName="horizon"
Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.262422 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="2178a916-adc5-4ff5-8972-30b105320f5f" containerName="horizon"
Dec 05 01:34:42 crc kubenswrapper[4665]: E1205 01:34:42.262447 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5803d957-267e-409c-914d-2b49c318aca6" containerName="nova-cell0-conductor-conductor"
Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.262453 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="5803d957-267e-409c-914d-2b49c318aca6" containerName="nova-cell0-conductor-conductor"
Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.262610 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="2178a916-adc5-4ff5-8972-30b105320f5f" containerName="horizon"
Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.262628 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="5803d957-267e-409c-914d-2b49c318aca6" containerName="nova-cell0-conductor-conductor"
Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.262638 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="2178a916-adc5-4ff5-8972-30b105320f5f" containerName="horizon-log"
Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.262649 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="2178a916-adc5-4ff5-8972-30b105320f5f" containerName="horizon"
Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.263410 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0"
Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.267511 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.268050 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-frk4r" Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.288354 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.404044 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clbsm\" (UniqueName: \"kubernetes.io/projected/fe074ce6-2f02-47f2-9e94-5a910517f64d-kube-api-access-clbsm\") pod \"nova-cell0-conductor-0\" (UID: \"fe074ce6-2f02-47f2-9e94-5a910517f64d\") " pod="openstack/nova-cell0-conductor-0" Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.404158 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe074ce6-2f02-47f2-9e94-5a910517f64d-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"fe074ce6-2f02-47f2-9e94-5a910517f64d\") " pod="openstack/nova-cell0-conductor-0" Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.404185 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe074ce6-2f02-47f2-9e94-5a910517f64d-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"fe074ce6-2f02-47f2-9e94-5a910517f64d\") " pod="openstack/nova-cell0-conductor-0" Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.506464 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clbsm\" (UniqueName: \"kubernetes.io/projected/fe074ce6-2f02-47f2-9e94-5a910517f64d-kube-api-access-clbsm\") pod \"nova-cell0-conductor-0\" (UID: \"fe074ce6-2f02-47f2-9e94-5a910517f64d\") " pod="openstack/nova-cell0-conductor-0" Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.506538 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe074ce6-2f02-47f2-9e94-5a910517f64d-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"fe074ce6-2f02-47f2-9e94-5a910517f64d\") " pod="openstack/nova-cell0-conductor-0" Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.506560 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe074ce6-2f02-47f2-9e94-5a910517f64d-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"fe074ce6-2f02-47f2-9e94-5a910517f64d\") " pod="openstack/nova-cell0-conductor-0" Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.512516 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fe074ce6-2f02-47f2-9e94-5a910517f64d-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"fe074ce6-2f02-47f2-9e94-5a910517f64d\") " pod="openstack/nova-cell0-conductor-0" Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.513238 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/fe074ce6-2f02-47f2-9e94-5a910517f64d-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" 
(UID: \"fe074ce6-2f02-47f2-9e94-5a910517f64d\") " pod="openstack/nova-cell0-conductor-0" Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.525003 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clbsm\" (UniqueName: \"kubernetes.io/projected/fe074ce6-2f02-47f2-9e94-5a910517f64d-kube-api-access-clbsm\") pod \"nova-cell0-conductor-0\" (UID: \"fe074ce6-2f02-47f2-9e94-5a910517f64d\") " pod="openstack/nova-cell0-conductor-0" Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.589638 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 01:34:42 crc kubenswrapper[4665]: I1205 01:34:42.905099 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5803d957-267e-409c-914d-2b49c318aca6" path="/var/lib/kubelet/pods/5803d957-267e-409c-914d-2b49c318aca6/volumes" Dec 05 01:34:43 crc kubenswrapper[4665]: I1205 01:34:43.041881 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 01:34:43 crc kubenswrapper[4665]: I1205 01:34:43.196995 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"fe074ce6-2f02-47f2-9e94-5a910517f64d","Type":"ContainerStarted","Data":"77aa68a46795450a5f78b4a1d543fa0673c9b4f84d097a8c9a27b634f1c64ad7"} Dec 05 01:34:43 crc kubenswrapper[4665]: I1205 01:34:43.197032 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"fe074ce6-2f02-47f2-9e94-5a910517f64d","Type":"ContainerStarted","Data":"129dc5e7c6a573f10f8b2965c6758bfb5c73d08918459df04434deae1d632a9c"} Dec 05 01:34:43 crc kubenswrapper[4665]: I1205 01:34:43.197215 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 05 01:34:44 crc kubenswrapper[4665]: I1205 01:34:44.922733 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:34:44 crc kubenswrapper[4665]: I1205 01:34:44.923115 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:34:44 crc kubenswrapper[4665]: I1205 01:34:44.923175 4665 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 01:34:44 crc kubenswrapper[4665]: I1205 01:34:44.924054 4665 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8528e05d6539c1b4845305ab27b265834c7200bd6a2bd4006fa1a98598856bbe"} pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 01:34:44 crc kubenswrapper[4665]: I1205 01:34:44.924181 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" 
containerID="cri-o://8528e05d6539c1b4845305ab27b265834c7200bd6a2bd4006fa1a98598856bbe" gracePeriod=600 Dec 05 01:34:45 crc kubenswrapper[4665]: I1205 01:34:45.220834 4665 generic.go:334] "Generic (PLEG): container finished" podID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerID="8528e05d6539c1b4845305ab27b265834c7200bd6a2bd4006fa1a98598856bbe" exitCode=0 Dec 05 01:34:45 crc kubenswrapper[4665]: I1205 01:34:45.220880 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerDied","Data":"8528e05d6539c1b4845305ab27b265834c7200bd6a2bd4006fa1a98598856bbe"} Dec 05 01:34:45 crc kubenswrapper[4665]: I1205 01:34:45.220914 4665 scope.go:117] "RemoveContainer" containerID="ddea2cf7dfc7d76e1a9cf4f232382b2b597e0edaf17f47e1250c2d22c5805549" Dec 05 01:34:46 crc kubenswrapper[4665]: I1205 01:34:46.230737 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerStarted","Data":"1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25"} Dec 05 01:34:46 crc kubenswrapper[4665]: I1205 01:34:46.252508 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=4.252486693 podStartE2EDuration="4.252486693s" podCreationTimestamp="2025-12-05 01:34:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:34:43.21542087 +0000 UTC m=+1458.554813179" watchObservedRunningTime="2025-12-05 01:34:46.252486693 +0000 UTC m=+1461.591878982" Dec 05 01:34:47 crc kubenswrapper[4665]: I1205 01:34:47.361117 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 01:34:50 crc kubenswrapper[4665]: I1205 01:34:50.834122 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 01:34:50 crc kubenswrapper[4665]: I1205 01:34:50.834887 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="d40b331f-9046-43a5-92e9-89ac2b567043" containerName="kube-state-metrics" containerID="cri-o://a9ff8bdcd3b3ecf7b32a9a7e0ec7c0f8f50930d24164db0958d82ef6e7f35912" gracePeriod=30 Dec 05 01:34:51 crc kubenswrapper[4665]: I1205 01:34:51.281748 4665 generic.go:334] "Generic (PLEG): container finished" podID="d40b331f-9046-43a5-92e9-89ac2b567043" containerID="a9ff8bdcd3b3ecf7b32a9a7e0ec7c0f8f50930d24164db0958d82ef6e7f35912" exitCode=2 Dec 05 01:34:51 crc kubenswrapper[4665]: I1205 01:34:51.282318 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d40b331f-9046-43a5-92e9-89ac2b567043","Type":"ContainerDied","Data":"a9ff8bdcd3b3ecf7b32a9a7e0ec7c0f8f50930d24164db0958d82ef6e7f35912"} Dec 05 01:34:51 crc kubenswrapper[4665]: I1205 01:34:51.282413 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"d40b331f-9046-43a5-92e9-89ac2b567043","Type":"ContainerDied","Data":"f0c5a153ceaa47f88c62a55ce0a4ffc182fce3e4547e4468be38dc4a6e81c5c3"} Dec 05 01:34:51 crc kubenswrapper[4665]: I1205 01:34:51.282426 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f0c5a153ceaa47f88c62a55ce0a4ffc182fce3e4547e4468be38dc4a6e81c5c3" Dec 05 01:34:51 
crc kubenswrapper[4665]: I1205 01:34:51.307420 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 01:34:51 crc kubenswrapper[4665]: I1205 01:34:51.370269 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qmqjz\" (UniqueName: \"kubernetes.io/projected/d40b331f-9046-43a5-92e9-89ac2b567043-kube-api-access-qmqjz\") pod \"d40b331f-9046-43a5-92e9-89ac2b567043\" (UID: \"d40b331f-9046-43a5-92e9-89ac2b567043\") " Dec 05 01:34:51 crc kubenswrapper[4665]: I1205 01:34:51.377551 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d40b331f-9046-43a5-92e9-89ac2b567043-kube-api-access-qmqjz" (OuterVolumeSpecName: "kube-api-access-qmqjz") pod "d40b331f-9046-43a5-92e9-89ac2b567043" (UID: "d40b331f-9046-43a5-92e9-89ac2b567043"). InnerVolumeSpecName "kube-api-access-qmqjz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:34:51 crc kubenswrapper[4665]: I1205 01:34:51.473350 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qmqjz\" (UniqueName: \"kubernetes.io/projected/d40b331f-9046-43a5-92e9-89ac2b567043-kube-api-access-qmqjz\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.289604 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.319133 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.331727 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.359344 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 01:34:52 crc kubenswrapper[4665]: E1205 01:34:52.359795 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d40b331f-9046-43a5-92e9-89ac2b567043" containerName="kube-state-metrics" Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.359813 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="d40b331f-9046-43a5-92e9-89ac2b567043" containerName="kube-state-metrics" Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.360090 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="d40b331f-9046-43a5-92e9-89ac2b567043" containerName="kube-state-metrics" Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.360792 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.371123 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.376524 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.376998 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.488936 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xm5tf\" (UniqueName: \"kubernetes.io/projected/f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c-kube-api-access-xm5tf\") pod \"kube-state-metrics-0\" (UID: \"f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c\") " pod="openstack/kube-state-metrics-0" Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.489016 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c\") " pod="openstack/kube-state-metrics-0" Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.489081 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c\") " pod="openstack/kube-state-metrics-0" Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.489173 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c\") " pod="openstack/kube-state-metrics-0" Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.590777 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c\") " pod="openstack/kube-state-metrics-0" Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.590863 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c\") " pod="openstack/kube-state-metrics-0" Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.590977 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xm5tf\" (UniqueName: \"kubernetes.io/projected/f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c-kube-api-access-xm5tf\") pod \"kube-state-metrics-0\" (UID: \"f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c\") " pod="openstack/kube-state-metrics-0" Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.591010 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-config\" 
(UniqueName: \"kubernetes.io/secret/f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c\") " pod="openstack/kube-state-metrics-0" Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.597524 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c\") " pod="openstack/kube-state-metrics-0" Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.597662 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c\") " pod="openstack/kube-state-metrics-0" Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.603671 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c\") " pod="openstack/kube-state-metrics-0" Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.616425 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xm5tf\" (UniqueName: \"kubernetes.io/projected/f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c-kube-api-access-xm5tf\") pod \"kube-state-metrics-0\" (UID: \"f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c\") " pod="openstack/kube-state-metrics-0" Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.617964 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.691009 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.772809 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.773493 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8bb4c486-528d-4144-8fc6-e6940686a4dd" containerName="ceilometer-central-agent" containerID="cri-o://68b3f1b595238750bef7378e7a24b55f4b0d9775f1c9dcb135f969f2a9742b7f" gracePeriod=30 Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.773558 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8bb4c486-528d-4144-8fc6-e6940686a4dd" containerName="proxy-httpd" containerID="cri-o://1741b529044af0c45d897c87c113fa912e971468a86a5d47b4294c6ae87e9a53" gracePeriod=30 Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.773601 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8bb4c486-528d-4144-8fc6-e6940686a4dd" containerName="sg-core" containerID="cri-o://b2327f3a3d40cfabb4fd1bc803fff798ab806f62cd65d49bd02dbde35957ddb4" gracePeriod=30 Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.773659 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="8bb4c486-528d-4144-8fc6-e6940686a4dd" containerName="ceilometer-notification-agent" containerID="cri-o://ef80a408e3dfb869f731e9f4ebe7102ec86958a834b7d54791cb09eecf8d7bb7" gracePeriod=30 Dec 05 01:34:52 crc kubenswrapper[4665]: I1205 01:34:52.932882 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d40b331f-9046-43a5-92e9-89ac2b567043" path="/var/lib/kubelet/pods/d40b331f-9046-43a5-92e9-89ac2b567043/volumes" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.242691 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.300881 4665 generic.go:334] "Generic (PLEG): container finished" podID="8bb4c486-528d-4144-8fc6-e6940686a4dd" containerID="1741b529044af0c45d897c87c113fa912e971468a86a5d47b4294c6ae87e9a53" exitCode=0 Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.300919 4665 generic.go:334] "Generic (PLEG): container finished" podID="8bb4c486-528d-4144-8fc6-e6940686a4dd" containerID="b2327f3a3d40cfabb4fd1bc803fff798ab806f62cd65d49bd02dbde35957ddb4" exitCode=2 Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.300958 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8bb4c486-528d-4144-8fc6-e6940686a4dd","Type":"ContainerDied","Data":"1741b529044af0c45d897c87c113fa912e971468a86a5d47b4294c6ae87e9a53"} Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.301021 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8bb4c486-528d-4144-8fc6-e6940686a4dd","Type":"ContainerDied","Data":"b2327f3a3d40cfabb4fd1bc803fff798ab806f62cd65d49bd02dbde35957ddb4"} Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.302232 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c","Type":"ContainerStarted","Data":"a77d60ca5357926ad1acf3edc39ba41f0c5ddefd98e51cd9ccea4f2c045b3d52"} Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.475558 4665 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/nova-cell0-cell-mapping-tdhhv"] Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.476877 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-tdhhv" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.483623 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.483776 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.503341 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-tdhhv"] Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.668364 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56a40c18-4bbe-4897-96ab-532ffb565321-config-data\") pod \"nova-cell0-cell-mapping-tdhhv\" (UID: \"56a40c18-4bbe-4897-96ab-532ffb565321\") " pod="openstack/nova-cell0-cell-mapping-tdhhv" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.668427 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56a40c18-4bbe-4897-96ab-532ffb565321-scripts\") pod \"nova-cell0-cell-mapping-tdhhv\" (UID: \"56a40c18-4bbe-4897-96ab-532ffb565321\") " pod="openstack/nova-cell0-cell-mapping-tdhhv" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.668481 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56a40c18-4bbe-4897-96ab-532ffb565321-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-tdhhv\" (UID: \"56a40c18-4bbe-4897-96ab-532ffb565321\") " pod="openstack/nova-cell0-cell-mapping-tdhhv" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.668541 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jz5gw\" (UniqueName: \"kubernetes.io/projected/56a40c18-4bbe-4897-96ab-532ffb565321-kube-api-access-jz5gw\") pod \"nova-cell0-cell-mapping-tdhhv\" (UID: \"56a40c18-4bbe-4897-96ab-532ffb565321\") " pod="openstack/nova-cell0-cell-mapping-tdhhv" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.703417 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.704803 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.721147 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.746565 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.780566 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jz5gw\" (UniqueName: \"kubernetes.io/projected/56a40c18-4bbe-4897-96ab-532ffb565321-kube-api-access-jz5gw\") pod \"nova-cell0-cell-mapping-tdhhv\" (UID: \"56a40c18-4bbe-4897-96ab-532ffb565321\") " pod="openstack/nova-cell0-cell-mapping-tdhhv" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.780668 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56a40c18-4bbe-4897-96ab-532ffb565321-config-data\") pod \"nova-cell0-cell-mapping-tdhhv\" (UID: \"56a40c18-4bbe-4897-96ab-532ffb565321\") " pod="openstack/nova-cell0-cell-mapping-tdhhv" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.780697 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56a40c18-4bbe-4897-96ab-532ffb565321-scripts\") pod \"nova-cell0-cell-mapping-tdhhv\" (UID: \"56a40c18-4bbe-4897-96ab-532ffb565321\") " pod="openstack/nova-cell0-cell-mapping-tdhhv" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.780746 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56a40c18-4bbe-4897-96ab-532ffb565321-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-tdhhv\" (UID: \"56a40c18-4bbe-4897-96ab-532ffb565321\") " pod="openstack/nova-cell0-cell-mapping-tdhhv" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.794676 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56a40c18-4bbe-4897-96ab-532ffb565321-config-data\") pod \"nova-cell0-cell-mapping-tdhhv\" (UID: \"56a40c18-4bbe-4897-96ab-532ffb565321\") " pod="openstack/nova-cell0-cell-mapping-tdhhv" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.795167 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56a40c18-4bbe-4897-96ab-532ffb565321-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-tdhhv\" (UID: \"56a40c18-4bbe-4897-96ab-532ffb565321\") " pod="openstack/nova-cell0-cell-mapping-tdhhv" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.796455 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56a40c18-4bbe-4897-96ab-532ffb565321-scripts\") pod \"nova-cell0-cell-mapping-tdhhv\" (UID: \"56a40c18-4bbe-4897-96ab-532ffb565321\") " pod="openstack/nova-cell0-cell-mapping-tdhhv" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.811402 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.813248 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.827676 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.828914 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jz5gw\" (UniqueName: \"kubernetes.io/projected/56a40c18-4bbe-4897-96ab-532ffb565321-kube-api-access-jz5gw\") pod \"nova-cell0-cell-mapping-tdhhv\" (UID: \"56a40c18-4bbe-4897-96ab-532ffb565321\") " pod="openstack/nova-cell0-cell-mapping-tdhhv" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.833978 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.882261 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e35dc66b-8013-4cd5-96d3-a3d8f0faa070-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e35dc66b-8013-4cd5-96d3-a3d8f0faa070\") " pod="openstack/nova-scheduler-0" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.882372 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v55cr\" (UniqueName: \"kubernetes.io/projected/e35dc66b-8013-4cd5-96d3-a3d8f0faa070-kube-api-access-v55cr\") pod \"nova-scheduler-0\" (UID: \"e35dc66b-8013-4cd5-96d3-a3d8f0faa070\") " pod="openstack/nova-scheduler-0" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.882438 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e35dc66b-8013-4cd5-96d3-a3d8f0faa070-config-data\") pod \"nova-scheduler-0\" (UID: \"e35dc66b-8013-4cd5-96d3-a3d8f0faa070\") " pod="openstack/nova-scheduler-0" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.942399 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.953028 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.961625 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.966823 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.993117 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e35dc66b-8013-4cd5-96d3-a3d8f0faa070-config-data\") pod \"nova-scheduler-0\" (UID: \"e35dc66b-8013-4cd5-96d3-a3d8f0faa070\") " pod="openstack/nova-scheduler-0" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.993256 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8-config-data\") pod \"nova-api-0\" (UID: \"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8\") " pod="openstack/nova-api-0" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.993321 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jw6g4\" (UniqueName: \"kubernetes.io/projected/f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8-kube-api-access-jw6g4\") pod \"nova-api-0\" (UID: \"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8\") " pod="openstack/nova-api-0" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.993340 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8-logs\") pod \"nova-api-0\" (UID: \"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8\") " pod="openstack/nova-api-0" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.993384 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e35dc66b-8013-4cd5-96d3-a3d8f0faa070-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e35dc66b-8013-4cd5-96d3-a3d8f0faa070\") " pod="openstack/nova-scheduler-0" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.993400 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v55cr\" (UniqueName: \"kubernetes.io/projected/e35dc66b-8013-4cd5-96d3-a3d8f0faa070-kube-api-access-v55cr\") pod \"nova-scheduler-0\" (UID: \"e35dc66b-8013-4cd5-96d3-a3d8f0faa070\") " pod="openstack/nova-scheduler-0" Dec 05 01:34:53 crc kubenswrapper[4665]: I1205 01:34:53.993444 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8\") " pod="openstack/nova-api-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.006440 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e35dc66b-8013-4cd5-96d3-a3d8f0faa070-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"e35dc66b-8013-4cd5-96d3-a3d8f0faa070\") " pod="openstack/nova-scheduler-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.021671 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/e35dc66b-8013-4cd5-96d3-a3d8f0faa070-config-data\") pod \"nova-scheduler-0\" (UID: \"e35dc66b-8013-4cd5-96d3-a3d8f0faa070\") " pod="openstack/nova-scheduler-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.033177 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v55cr\" (UniqueName: \"kubernetes.io/projected/e35dc66b-8013-4cd5-96d3-a3d8f0faa070-kube-api-access-v55cr\") pod \"nova-scheduler-0\" (UID: \"e35dc66b-8013-4cd5-96d3-a3d8f0faa070\") " pod="openstack/nova-scheduler-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.071066 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-cqsxn"] Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.080728 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.094652 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8-config-data\") pod \"nova-api-0\" (UID: \"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8\") " pod="openstack/nova-api-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.095004 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtz52\" (UniqueName: \"kubernetes.io/projected/348c2ac5-4b5f-42c3-a4ab-e2576ebc099c-kube-api-access-vtz52\") pod \"nova-metadata-0\" (UID: \"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c\") " pod="openstack/nova-metadata-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.095090 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/348c2ac5-4b5f-42c3-a4ab-e2576ebc099c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c\") " pod="openstack/nova-metadata-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.095161 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jw6g4\" (UniqueName: \"kubernetes.io/projected/f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8-kube-api-access-jw6g4\") pod \"nova-api-0\" (UID: \"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8\") " pod="openstack/nova-api-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.095275 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8-logs\") pod \"nova-api-0\" (UID: \"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8\") " pod="openstack/nova-api-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.095397 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/348c2ac5-4b5f-42c3-a4ab-e2576ebc099c-logs\") pod \"nova-metadata-0\" (UID: \"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c\") " pod="openstack/nova-metadata-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.095551 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8\") " pod="openstack/nova-api-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.095665 4665 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/348c2ac5-4b5f-42c3-a4ab-e2576ebc099c-config-data\") pod \"nova-metadata-0\" (UID: \"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c\") " pod="openstack/nova-metadata-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.096394 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8-logs\") pod \"nova-api-0\" (UID: \"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8\") " pod="openstack/nova-api-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.098927 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8-config-data\") pod \"nova-api-0\" (UID: \"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8\") " pod="openstack/nova-api-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.104171 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-tdhhv" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.105153 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8\") " pod="openstack/nova-api-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.151047 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-cqsxn"] Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.161051 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jw6g4\" (UniqueName: \"kubernetes.io/projected/f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8-kube-api-access-jw6g4\") pod \"nova-api-0\" (UID: \"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8\") " pod="openstack/nova-api-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.208530 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b9qzl\" (UniqueName: \"kubernetes.io/projected/44e024d0-ebb1-4a1f-9761-f47b20539a2f-kube-api-access-b9qzl\") pod \"dnsmasq-dns-757b4f8459-cqsxn\" (UID: \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\") " pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.208571 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-cqsxn\" (UID: \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\") " pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.208607 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtz52\" (UniqueName: \"kubernetes.io/projected/348c2ac5-4b5f-42c3-a4ab-e2576ebc099c-kube-api-access-vtz52\") pod \"nova-metadata-0\" (UID: \"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c\") " pod="openstack/nova-metadata-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.208627 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/348c2ac5-4b5f-42c3-a4ab-e2576ebc099c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c\") " 
pod="openstack/nova-metadata-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.208669 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/348c2ac5-4b5f-42c3-a4ab-e2576ebc099c-logs\") pod \"nova-metadata-0\" (UID: \"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c\") " pod="openstack/nova-metadata-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.208699 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-cqsxn\" (UID: \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\") " pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.208739 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-dns-svc\") pod \"dnsmasq-dns-757b4f8459-cqsxn\" (UID: \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\") " pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.208775 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/348c2ac5-4b5f-42c3-a4ab-e2576ebc099c-config-data\") pod \"nova-metadata-0\" (UID: \"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c\") " pod="openstack/nova-metadata-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.208813 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-cqsxn\" (UID: \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\") " pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.208876 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-config\") pod \"dnsmasq-dns-757b4f8459-cqsxn\" (UID: \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\") " pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.220474 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/348c2ac5-4b5f-42c3-a4ab-e2576ebc099c-logs\") pod \"nova-metadata-0\" (UID: \"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c\") " pod="openstack/nova-metadata-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.225329 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/348c2ac5-4b5f-42c3-a4ab-e2576ebc099c-config-data\") pod \"nova-metadata-0\" (UID: \"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c\") " pod="openstack/nova-metadata-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.225835 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/348c2ac5-4b5f-42c3-a4ab-e2576ebc099c-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c\") " pod="openstack/nova-metadata-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.245238 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtz52\" 
(UniqueName: \"kubernetes.io/projected/348c2ac5-4b5f-42c3-a4ab-e2576ebc099c-kube-api-access-vtz52\") pod \"nova-metadata-0\" (UID: \"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c\") " pod="openstack/nova-metadata-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.253730 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.262167 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.280122 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.281675 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.285176 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.307981 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.312374 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-config\") pod \"dnsmasq-dns-757b4f8459-cqsxn\" (UID: \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\") " pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.312427 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b9qzl\" (UniqueName: \"kubernetes.io/projected/44e024d0-ebb1-4a1f-9761-f47b20539a2f-kube-api-access-b9qzl\") pod \"dnsmasq-dns-757b4f8459-cqsxn\" (UID: \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\") " pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.312452 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-cqsxn\" (UID: \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\") " pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.312520 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-cqsxn\" (UID: \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\") " pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.312556 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-dns-svc\") pod \"dnsmasq-dns-757b4f8459-cqsxn\" (UID: \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\") " pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.312611 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-cqsxn\" (UID: \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\") " pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" Dec 05 
01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.313412 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-dns-swift-storage-0\") pod \"dnsmasq-dns-757b4f8459-cqsxn\" (UID: \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\") " pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.313943 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-config\") pod \"dnsmasq-dns-757b4f8459-cqsxn\" (UID: \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\") " pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.314267 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-ovsdbserver-nb\") pod \"dnsmasq-dns-757b4f8459-cqsxn\" (UID: \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\") " pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.314534 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-ovsdbserver-sb\") pod \"dnsmasq-dns-757b4f8459-cqsxn\" (UID: \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\") " pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.315055 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-dns-svc\") pod \"dnsmasq-dns-757b4f8459-cqsxn\" (UID: \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\") " pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.340948 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.351730 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b9qzl\" (UniqueName: \"kubernetes.io/projected/44e024d0-ebb1-4a1f-9761-f47b20539a2f-kube-api-access-b9qzl\") pod \"dnsmasq-dns-757b4f8459-cqsxn\" (UID: \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\") " pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.367080 4665 generic.go:334] "Generic (PLEG): container finished" podID="8bb4c486-528d-4144-8fc6-e6940686a4dd" containerID="68b3f1b595238750bef7378e7a24b55f4b0d9775f1c9dcb135f969f2a9742b7f" exitCode=0 Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.367177 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8bb4c486-528d-4144-8fc6-e6940686a4dd","Type":"ContainerDied","Data":"68b3f1b595238750bef7378e7a24b55f4b0d9775f1c9dcb135f969f2a9742b7f"} Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.416766 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16c94c27-cb84-4662-b838-9dd308b4eabb-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"16c94c27-cb84-4662-b838-9dd308b4eabb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.416977 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smm9x\" (UniqueName: \"kubernetes.io/projected/16c94c27-cb84-4662-b838-9dd308b4eabb-kube-api-access-smm9x\") pod \"nova-cell1-novncproxy-0\" (UID: \"16c94c27-cb84-4662-b838-9dd308b4eabb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.417048 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16c94c27-cb84-4662-b838-9dd308b4eabb-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"16c94c27-cb84-4662-b838-9dd308b4eabb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.436182 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.519145 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16c94c27-cb84-4662-b838-9dd308b4eabb-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"16c94c27-cb84-4662-b838-9dd308b4eabb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.519267 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16c94c27-cb84-4662-b838-9dd308b4eabb-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"16c94c27-cb84-4662-b838-9dd308b4eabb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.519313 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smm9x\" (UniqueName: \"kubernetes.io/projected/16c94c27-cb84-4662-b838-9dd308b4eabb-kube-api-access-smm9x\") pod \"nova-cell1-novncproxy-0\" (UID: \"16c94c27-cb84-4662-b838-9dd308b4eabb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.530473 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16c94c27-cb84-4662-b838-9dd308b4eabb-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"16c94c27-cb84-4662-b838-9dd308b4eabb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.530995 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16c94c27-cb84-4662-b838-9dd308b4eabb-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"16c94c27-cb84-4662-b838-9dd308b4eabb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.544008 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smm9x\" (UniqueName: \"kubernetes.io/projected/16c94c27-cb84-4662-b838-9dd308b4eabb-kube-api-access-smm9x\") pod \"nova-cell1-novncproxy-0\" (UID: \"16c94c27-cb84-4662-b838-9dd308b4eabb\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.627803 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:34:54 crc kubenswrapper[4665]: I1205 01:34:54.964975 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-tdhhv"] Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.408051 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.430695 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c","Type":"ContainerStarted","Data":"c109117e4c9b13a14a713bffa6a80c8da5cb8c4b26804ca40069ba626edcd7be"} Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.430758 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.433889 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-tdhhv" event={"ID":"56a40c18-4bbe-4897-96ab-532ffb565321","Type":"ContainerStarted","Data":"0235f49de574c0322c2723969ae380fb7ad992941f7bbd6a07821bfa79d68e3e"} Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.439788 4665 generic.go:334] "Generic (PLEG): container finished" podID="8bb4c486-528d-4144-8fc6-e6940686a4dd" containerID="ef80a408e3dfb869f731e9f4ebe7102ec86958a834b7d54791cb09eecf8d7bb7" exitCode=0 Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.439826 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8bb4c486-528d-4144-8fc6-e6940686a4dd","Type":"ContainerDied","Data":"ef80a408e3dfb869f731e9f4ebe7102ec86958a834b7d54791cb09eecf8d7bb7"} Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.462328 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-lqq2r"] Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.463614 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-lqq2r" Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.470552 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.470948 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.487754 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-lqq2r"] Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.509861 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=3.09621199 podStartE2EDuration="3.509837468s" podCreationTimestamp="2025-12-05 01:34:52 +0000 UTC" firstStartedPulling="2025-12-05 01:34:53.246757311 +0000 UTC m=+1468.586149610" lastFinishedPulling="2025-12-05 01:34:53.660382799 +0000 UTC m=+1468.999775088" observedRunningTime="2025-12-05 01:34:55.448253348 +0000 UTC m=+1470.787645647" watchObservedRunningTime="2025-12-05 01:34:55.509837468 +0000 UTC m=+1470.849229767" Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.654612 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-tdhhv" podStartSLOduration=2.65458814 podStartE2EDuration="2.65458814s" podCreationTimestamp="2025-12-05 01:34:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:34:55.469318557 +0000 UTC m=+1470.808710866" watchObservedRunningTime="2025-12-05 01:34:55.65458814 +0000 UTC m=+1470.993980439" Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.665042 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cffe2874-1254-4832-943d-59419d486859-scripts\") pod \"nova-cell1-conductor-db-sync-lqq2r\" (UID: \"cffe2874-1254-4832-943d-59419d486859\") " pod="openstack/nova-cell1-conductor-db-sync-lqq2r" Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.665106 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rwzsl\" (UniqueName: \"kubernetes.io/projected/cffe2874-1254-4832-943d-59419d486859-kube-api-access-rwzsl\") pod \"nova-cell1-conductor-db-sync-lqq2r\" (UID: \"cffe2874-1254-4832-943d-59419d486859\") " pod="openstack/nova-cell1-conductor-db-sync-lqq2r" Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.665177 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cffe2874-1254-4832-943d-59419d486859-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-lqq2r\" (UID: \"cffe2874-1254-4832-943d-59419d486859\") " pod="openstack/nova-cell1-conductor-db-sync-lqq2r" Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.665272 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cffe2874-1254-4832-943d-59419d486859-config-data\") pod \"nova-cell1-conductor-db-sync-lqq2r\" (UID: \"cffe2874-1254-4832-943d-59419d486859\") " pod="openstack/nova-cell1-conductor-db-sync-lqq2r" Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.775600 4665 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cffe2874-1254-4832-943d-59419d486859-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-lqq2r\" (UID: \"cffe2874-1254-4832-943d-59419d486859\") " pod="openstack/nova-cell1-conductor-db-sync-lqq2r" Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.775711 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cffe2874-1254-4832-943d-59419d486859-config-data\") pod \"nova-cell1-conductor-db-sync-lqq2r\" (UID: \"cffe2874-1254-4832-943d-59419d486859\") " pod="openstack/nova-cell1-conductor-db-sync-lqq2r" Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.775748 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cffe2874-1254-4832-943d-59419d486859-scripts\") pod \"nova-cell1-conductor-db-sync-lqq2r\" (UID: \"cffe2874-1254-4832-943d-59419d486859\") " pod="openstack/nova-cell1-conductor-db-sync-lqq2r" Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.775775 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rwzsl\" (UniqueName: \"kubernetes.io/projected/cffe2874-1254-4832-943d-59419d486859-kube-api-access-rwzsl\") pod \"nova-cell1-conductor-db-sync-lqq2r\" (UID: \"cffe2874-1254-4832-943d-59419d486859\") " pod="openstack/nova-cell1-conductor-db-sync-lqq2r" Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.803418 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cffe2874-1254-4832-943d-59419d486859-config-data\") pod \"nova-cell1-conductor-db-sync-lqq2r\" (UID: \"cffe2874-1254-4832-943d-59419d486859\") " pod="openstack/nova-cell1-conductor-db-sync-lqq2r" Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.826652 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cffe2874-1254-4832-943d-59419d486859-scripts\") pod \"nova-cell1-conductor-db-sync-lqq2r\" (UID: \"cffe2874-1254-4832-943d-59419d486859\") " pod="openstack/nova-cell1-conductor-db-sync-lqq2r" Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.840671 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rwzsl\" (UniqueName: \"kubernetes.io/projected/cffe2874-1254-4832-943d-59419d486859-kube-api-access-rwzsl\") pod \"nova-cell1-conductor-db-sync-lqq2r\" (UID: \"cffe2874-1254-4832-943d-59419d486859\") " pod="openstack/nova-cell1-conductor-db-sync-lqq2r" Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.864773 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.894369 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.915637 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cffe2874-1254-4832-943d-59419d486859-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-lqq2r\" (UID: \"cffe2874-1254-4832-943d-59419d486859\") " pod="openstack/nova-cell1-conductor-db-sync-lqq2r" Dec 05 01:34:55 crc kubenswrapper[4665]: I1205 01:34:55.947465 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-cqsxn"] Dec 05 01:34:55 crc 
kubenswrapper[4665]: I1205 01:34:55.966843 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.066779 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.098039 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-lqq2r" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.188938 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xmq7t\" (UniqueName: \"kubernetes.io/projected/8bb4c486-528d-4144-8fc6-e6940686a4dd-kube-api-access-xmq7t\") pod \"8bb4c486-528d-4144-8fc6-e6940686a4dd\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.188995 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8bb4c486-528d-4144-8fc6-e6940686a4dd-log-httpd\") pod \"8bb4c486-528d-4144-8fc6-e6940686a4dd\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.189032 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bb4c486-528d-4144-8fc6-e6940686a4dd-config-data\") pod \"8bb4c486-528d-4144-8fc6-e6940686a4dd\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.189184 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8bb4c486-528d-4144-8fc6-e6940686a4dd-scripts\") pod \"8bb4c486-528d-4144-8fc6-e6940686a4dd\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.189231 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bb4c486-528d-4144-8fc6-e6940686a4dd-combined-ca-bundle\") pod \"8bb4c486-528d-4144-8fc6-e6940686a4dd\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.189253 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8bb4c486-528d-4144-8fc6-e6940686a4dd-sg-core-conf-yaml\") pod \"8bb4c486-528d-4144-8fc6-e6940686a4dd\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.189283 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8bb4c486-528d-4144-8fc6-e6940686a4dd-run-httpd\") pod \"8bb4c486-528d-4144-8fc6-e6940686a4dd\" (UID: \"8bb4c486-528d-4144-8fc6-e6940686a4dd\") " Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.190050 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bb4c486-528d-4144-8fc6-e6940686a4dd-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "8bb4c486-528d-4144-8fc6-e6940686a4dd" (UID: "8bb4c486-528d-4144-8fc6-e6940686a4dd"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.192151 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8bb4c486-528d-4144-8fc6-e6940686a4dd-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "8bb4c486-528d-4144-8fc6-e6940686a4dd" (UID: "8bb4c486-528d-4144-8fc6-e6940686a4dd"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.234488 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bb4c486-528d-4144-8fc6-e6940686a4dd-scripts" (OuterVolumeSpecName: "scripts") pod "8bb4c486-528d-4144-8fc6-e6940686a4dd" (UID: "8bb4c486-528d-4144-8fc6-e6940686a4dd"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.234771 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8bb4c486-528d-4144-8fc6-e6940686a4dd-kube-api-access-xmq7t" (OuterVolumeSpecName: "kube-api-access-xmq7t") pod "8bb4c486-528d-4144-8fc6-e6940686a4dd" (UID: "8bb4c486-528d-4144-8fc6-e6940686a4dd"). InnerVolumeSpecName "kube-api-access-xmq7t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.291466 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8bb4c486-528d-4144-8fc6-e6940686a4dd-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.291679 4665 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8bb4c486-528d-4144-8fc6-e6940686a4dd-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.291688 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xmq7t\" (UniqueName: \"kubernetes.io/projected/8bb4c486-528d-4144-8fc6-e6940686a4dd-kube-api-access-xmq7t\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.291700 4665 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/8bb4c486-528d-4144-8fc6-e6940686a4dd-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.324443 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bb4c486-528d-4144-8fc6-e6940686a4dd-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "8bb4c486-528d-4144-8fc6-e6940686a4dd" (UID: "8bb4c486-528d-4144-8fc6-e6940686a4dd"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.394419 4665 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/8bb4c486-528d-4144-8fc6-e6940686a4dd-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.494520 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e35dc66b-8013-4cd5-96d3-a3d8f0faa070","Type":"ContainerStarted","Data":"21429af4ae47a6de87cba8e1eb6b132129df1309e0b317441701536a4fd55dee"} Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.494683 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bb4c486-528d-4144-8fc6-e6940686a4dd-config-data" (OuterVolumeSpecName: "config-data") pod "8bb4c486-528d-4144-8fc6-e6940686a4dd" (UID: "8bb4c486-528d-4144-8fc6-e6940686a4dd"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.495804 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8bb4c486-528d-4144-8fc6-e6940686a4dd-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.496890 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8bb4c486-528d-4144-8fc6-e6940686a4dd-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "8bb4c486-528d-4144-8fc6-e6940686a4dd" (UID: "8bb4c486-528d-4144-8fc6-e6940686a4dd"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.512103 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8","Type":"ContainerStarted","Data":"9c95f25d39c6486e6981090c8f87fecde0d6d16162d74f48d6cca1d3723c6a89"} Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.516334 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-tdhhv" event={"ID":"56a40c18-4bbe-4897-96ab-532ffb565321","Type":"ContainerStarted","Data":"dffed07b73eb3577e3c4ba61d5e18d3b7b2328bda7e18e4fae8d6773e88002ad"} Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.526372 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"8bb4c486-528d-4144-8fc6-e6940686a4dd","Type":"ContainerDied","Data":"70f8d2fe0cf5ede57d3f20855ee25c36b448a0cfc3fb384d6d938be67ae3d78b"} Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.526469 4665 scope.go:117] "RemoveContainer" containerID="1741b529044af0c45d897c87c113fa912e971468a86a5d47b4294c6ae87e9a53" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.526682 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.540749 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" event={"ID":"44e024d0-ebb1-4a1f-9761-f47b20539a2f","Type":"ContainerStarted","Data":"eb76217eba3af3eab6b9954e3eb543e7b714d696f725f649d14a677f402c2822"} Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.540784 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" event={"ID":"44e024d0-ebb1-4a1f-9761-f47b20539a2f","Type":"ContainerStarted","Data":"74f34453cf2139c149b6eabbd3e7ba5a04cc2735860354bf639df72f3a0dc59c"} Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.570115 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c","Type":"ContainerStarted","Data":"6379eed14817bb7da02f4a4a063dbb2734fa867c548db663a10f47ca940f577b"} Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.604985 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"16c94c27-cb84-4662-b838-9dd308b4eabb","Type":"ContainerStarted","Data":"4e5785efc55077f5efd8a957de67c54ce7bd26eaf7b6cced43b9098bc74bfc00"} Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.605607 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/8bb4c486-528d-4144-8fc6-e6940686a4dd-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.621522 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.634756 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.673050 4665 scope.go:117] "RemoveContainer" containerID="b2327f3a3d40cfabb4fd1bc803fff798ab806f62cd65d49bd02dbde35957ddb4" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.689741 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:34:56 crc kubenswrapper[4665]: E1205 01:34:56.690252 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bb4c486-528d-4144-8fc6-e6940686a4dd" containerName="proxy-httpd" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.690273 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bb4c486-528d-4144-8fc6-e6940686a4dd" containerName="proxy-httpd" Dec 05 01:34:56 crc kubenswrapper[4665]: E1205 01:34:56.690462 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bb4c486-528d-4144-8fc6-e6940686a4dd" containerName="ceilometer-central-agent" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.690477 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bb4c486-528d-4144-8fc6-e6940686a4dd" containerName="ceilometer-central-agent" Dec 05 01:34:56 crc kubenswrapper[4665]: E1205 01:34:56.690495 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8bb4c486-528d-4144-8fc6-e6940686a4dd" containerName="ceilometer-notification-agent" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.690505 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bb4c486-528d-4144-8fc6-e6940686a4dd" containerName="ceilometer-notification-agent" Dec 05 01:34:56 crc kubenswrapper[4665]: E1205 01:34:56.690526 4665 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="8bb4c486-528d-4144-8fc6-e6940686a4dd" containerName="sg-core" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.690537 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="8bb4c486-528d-4144-8fc6-e6940686a4dd" containerName="sg-core" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.690761 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bb4c486-528d-4144-8fc6-e6940686a4dd" containerName="ceilometer-central-agent" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.690784 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bb4c486-528d-4144-8fc6-e6940686a4dd" containerName="ceilometer-notification-agent" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.690802 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bb4c486-528d-4144-8fc6-e6940686a4dd" containerName="sg-core" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.690815 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="8bb4c486-528d-4144-8fc6-e6940686a4dd" containerName="proxy-httpd" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.693624 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.713864 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.714093 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.720027 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.749342 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.760196 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-lqq2r"] Dec 05 01:34:56 crc kubenswrapper[4665]: W1205 01:34:56.775985 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcffe2874_1254_4832_943d_59419d486859.slice/crio-0d1c8d889dc5e54d49082a852922862f6686cfcc38ef65e60dc129b1223034d8 WatchSource:0}: Error finding container 0d1c8d889dc5e54d49082a852922862f6686cfcc38ef65e60dc129b1223034d8: Status 404 returned error can't find the container with id 0d1c8d889dc5e54d49082a852922862f6686cfcc38ef65e60dc129b1223034d8 Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.810366 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.810482 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lbwzf\" (UniqueName: \"kubernetes.io/projected/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-kube-api-access-lbwzf\") pod \"ceilometer-0\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.810703 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-scripts\") pod \"ceilometer-0\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.810859 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-log-httpd\") pod \"ceilometer-0\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.811026 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-config-data\") pod \"ceilometer-0\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.811066 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.811094 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-run-httpd\") pod \"ceilometer-0\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.811143 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.915043 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.915097 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-run-httpd\") pod \"ceilometer-0\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.915126 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.915190 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.916264 4665 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lbwzf\" (UniqueName: \"kubernetes.io/projected/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-kube-api-access-lbwzf\") pod \"ceilometer-0\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.916330 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-scripts\") pod \"ceilometer-0\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.916363 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-log-httpd\") pod \"ceilometer-0\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.916416 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-config-data\") pod \"ceilometer-0\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.916491 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-run-httpd\") pod \"ceilometer-0\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.917274 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-log-httpd\") pod \"ceilometer-0\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.920446 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-scripts\") pod \"ceilometer-0\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.929638 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.931660 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.935646 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-config-data\") pod \"ceilometer-0\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.936585 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " pod="openstack/ceilometer-0" Dec 05 01:34:56 crc kubenswrapper[4665]: I1205 01:34:56.942568 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lbwzf\" (UniqueName: \"kubernetes.io/projected/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-kube-api-access-lbwzf\") pod \"ceilometer-0\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " pod="openstack/ceilometer-0" Dec 05 01:34:57 crc kubenswrapper[4665]: I1205 01:34:57.001616 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8bb4c486-528d-4144-8fc6-e6940686a4dd" path="/var/lib/kubelet/pods/8bb4c486-528d-4144-8fc6-e6940686a4dd/volumes" Dec 05 01:34:57 crc kubenswrapper[4665]: I1205 01:34:57.065750 4665 scope.go:117] "RemoveContainer" containerID="ef80a408e3dfb869f731e9f4ebe7102ec86958a834b7d54791cb09eecf8d7bb7" Dec 05 01:34:57 crc kubenswrapper[4665]: I1205 01:34:57.129534 4665 scope.go:117] "RemoveContainer" containerID="68b3f1b595238750bef7378e7a24b55f4b0d9775f1c9dcb135f969f2a9742b7f" Dec 05 01:34:57 crc kubenswrapper[4665]: I1205 01:34:57.207026 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:34:57 crc kubenswrapper[4665]: I1205 01:34:57.624276 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-lqq2r" event={"ID":"cffe2874-1254-4832-943d-59419d486859","Type":"ContainerStarted","Data":"d32d4c60f5a286b86e07ae60fbe95e3d9a21a73670e77f52117a1dc477781641"} Dec 05 01:34:57 crc kubenswrapper[4665]: I1205 01:34:57.624340 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-lqq2r" event={"ID":"cffe2874-1254-4832-943d-59419d486859","Type":"ContainerStarted","Data":"0d1c8d889dc5e54d49082a852922862f6686cfcc38ef65e60dc129b1223034d8"} Dec 05 01:34:57 crc kubenswrapper[4665]: I1205 01:34:57.653368 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-lqq2r" podStartSLOduration=2.653345641 podStartE2EDuration="2.653345641s" podCreationTimestamp="2025-12-05 01:34:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:34:57.646796772 +0000 UTC m=+1472.986189091" watchObservedRunningTime="2025-12-05 01:34:57.653345641 +0000 UTC m=+1472.992737940" Dec 05 01:34:57 crc kubenswrapper[4665]: I1205 01:34:57.667768 4665 generic.go:334] "Generic (PLEG): container finished" podID="44e024d0-ebb1-4a1f-9761-f47b20539a2f" containerID="eb76217eba3af3eab6b9954e3eb543e7b714d696f725f649d14a677f402c2822" exitCode=0 Dec 05 01:34:57 crc kubenswrapper[4665]: I1205 01:34:57.669263 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" event={"ID":"44e024d0-ebb1-4a1f-9761-f47b20539a2f","Type":"ContainerDied","Data":"eb76217eba3af3eab6b9954e3eb543e7b714d696f725f649d14a677f402c2822"} Dec 05 01:34:57 crc kubenswrapper[4665]: I1205 01:34:57.669306 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" event={"ID":"44e024d0-ebb1-4a1f-9761-f47b20539a2f","Type":"ContainerStarted","Data":"8dddd25fa836af08b94cc444e895d5eeef63ef3292ffd922b070e72f2453a6e0"} Dec 05 01:34:57 crc kubenswrapper[4665]: I1205 01:34:57.669338 4665 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" Dec 05 01:34:57 crc kubenswrapper[4665]: I1205 01:34:57.699174 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" podStartSLOduration=3.699155288 podStartE2EDuration="3.699155288s" podCreationTimestamp="2025-12-05 01:34:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:34:57.691837012 +0000 UTC m=+1473.031229311" watchObservedRunningTime="2025-12-05 01:34:57.699155288 +0000 UTC m=+1473.038547587" Dec 05 01:34:57 crc kubenswrapper[4665]: I1205 01:34:57.771894 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:34:58 crc kubenswrapper[4665]: I1205 01:34:58.330904 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 01:34:58 crc kubenswrapper[4665]: I1205 01:34:58.349875 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 01:34:58 crc kubenswrapper[4665]: I1205 01:34:58.684407 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d","Type":"ContainerStarted","Data":"a03b13cc8dd66ad952717657320087391c135257eb007f3d8dfbe6bdae651044"} Dec 05 01:35:01 crc kubenswrapper[4665]: I1205 01:35:01.739214 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"16c94c27-cb84-4662-b838-9dd308b4eabb","Type":"ContainerStarted","Data":"a0d36f35a5202b3f7b7ddf03aec638446e490febb742b9d0ed62a0975886c9bc"} Dec 05 01:35:01 crc kubenswrapper[4665]: I1205 01:35:01.739321 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="16c94c27-cb84-4662-b838-9dd308b4eabb" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://a0d36f35a5202b3f7b7ddf03aec638446e490febb742b9d0ed62a0975886c9bc" gracePeriod=30 Dec 05 01:35:01 crc kubenswrapper[4665]: I1205 01:35:01.764244 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e35dc66b-8013-4cd5-96d3-a3d8f0faa070","Type":"ContainerStarted","Data":"021896abc8f23dabdc9772fbcb9e94f32f063d97f0fed27653691c9ab1fa1031"} Dec 05 01:35:01 crc kubenswrapper[4665]: I1205 01:35:01.768332 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.7174208970000002 podStartE2EDuration="7.768281523s" podCreationTimestamp="2025-12-05 01:34:54 +0000 UTC" firstStartedPulling="2025-12-05 01:34:55.82774137 +0000 UTC m=+1471.167133669" lastFinishedPulling="2025-12-05 01:35:00.878601996 +0000 UTC m=+1476.217994295" observedRunningTime="2025-12-05 01:35:01.7569756 +0000 UTC m=+1477.096367899" watchObservedRunningTime="2025-12-05 01:35:01.768281523 +0000 UTC m=+1477.107673822" Dec 05 01:35:01 crc kubenswrapper[4665]: I1205 01:35:01.776606 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8","Type":"ContainerStarted","Data":"293d123617a77cadd2d207403a86d70bf0e613a462a2275f2e0f45327c40cc56"} Dec 05 01:35:01 crc kubenswrapper[4665]: I1205 01:35:01.792486 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=3.576274331 podStartE2EDuration="8.792463878s" 
podCreationTimestamp="2025-12-05 01:34:53 +0000 UTC" firstStartedPulling="2025-12-05 01:34:55.652640003 +0000 UTC m=+1470.992032302" lastFinishedPulling="2025-12-05 01:35:00.86882955 +0000 UTC m=+1476.208221849" observedRunningTime="2025-12-05 01:35:01.784529746 +0000 UTC m=+1477.123922045" watchObservedRunningTime="2025-12-05 01:35:01.792463878 +0000 UTC m=+1477.131856177" Dec 05 01:35:01 crc kubenswrapper[4665]: I1205 01:35:01.794031 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c","Type":"ContainerStarted","Data":"9e89c4aa5b84ed91fd84dde1516b0684738846694bbd4fa1abfdbe4fc0a6f8a7"} Dec 05 01:35:01 crc kubenswrapper[4665]: I1205 01:35:01.806525 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d","Type":"ContainerStarted","Data":"9d6677faf49b5558c4aeab725be42fc8c2c759a9271e54a55084186b85f56a29"} Dec 05 01:35:02 crc kubenswrapper[4665]: I1205 01:35:02.816992 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8","Type":"ContainerStarted","Data":"9821f8dc0effcbcc2c0601af1813de6a2acced24cc7372c5a47515f8f166d06b"} Dec 05 01:35:02 crc kubenswrapper[4665]: I1205 01:35:02.818840 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c","Type":"ContainerStarted","Data":"72bbca08eb40ac138dd3dc6ffd52abcb83f0107d81141856e27be7a15868ae8d"} Dec 05 01:35:02 crc kubenswrapper[4665]: I1205 01:35:02.818958 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="348c2ac5-4b5f-42c3-a4ab-e2576ebc099c" containerName="nova-metadata-log" containerID="cri-o://9e89c4aa5b84ed91fd84dde1516b0684738846694bbd4fa1abfdbe4fc0a6f8a7" gracePeriod=30 Dec 05 01:35:02 crc kubenswrapper[4665]: I1205 01:35:02.818966 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="348c2ac5-4b5f-42c3-a4ab-e2576ebc099c" containerName="nova-metadata-metadata" containerID="cri-o://72bbca08eb40ac138dd3dc6ffd52abcb83f0107d81141856e27be7a15868ae8d" gracePeriod=30 Dec 05 01:35:02 crc kubenswrapper[4665]: I1205 01:35:02.824104 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d","Type":"ContainerStarted","Data":"561744087c5b5314ce1fdd1e87ce3734c66c6d69c2717214b152e9e333fb8cfa"} Dec 05 01:35:02 crc kubenswrapper[4665]: I1205 01:35:02.848275 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=4.633617572 podStartE2EDuration="9.848259353s" podCreationTimestamp="2025-12-05 01:34:53 +0000 UTC" firstStartedPulling="2025-12-05 01:34:55.648031731 +0000 UTC m=+1470.987424040" lastFinishedPulling="2025-12-05 01:35:00.862673532 +0000 UTC m=+1476.202065821" observedRunningTime="2025-12-05 01:35:02.844750538 +0000 UTC m=+1478.184142837" watchObservedRunningTime="2025-12-05 01:35:02.848259353 +0000 UTC m=+1478.187651652" Dec 05 01:35:02 crc kubenswrapper[4665]: I1205 01:35:02.883277 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=4.421368627 podStartE2EDuration="9.88325898s" podCreationTimestamp="2025-12-05 01:34:53 +0000 UTC" firstStartedPulling="2025-12-05 01:34:55.416763135 +0000 UTC 
m=+1470.756155434" lastFinishedPulling="2025-12-05 01:35:00.878653488 +0000 UTC m=+1476.218045787" observedRunningTime="2025-12-05 01:35:02.878811613 +0000 UTC m=+1478.218203912" watchObservedRunningTime="2025-12-05 01:35:02.88325898 +0000 UTC m=+1478.222651279" Dec 05 01:35:02 crc kubenswrapper[4665]: I1205 01:35:02.913371 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.717805 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.822462 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/348c2ac5-4b5f-42c3-a4ab-e2576ebc099c-logs\") pod \"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c\" (UID: \"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c\") " Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.822641 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vtz52\" (UniqueName: \"kubernetes.io/projected/348c2ac5-4b5f-42c3-a4ab-e2576ebc099c-kube-api-access-vtz52\") pod \"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c\" (UID: \"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c\") " Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.822756 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/348c2ac5-4b5f-42c3-a4ab-e2576ebc099c-combined-ca-bundle\") pod \"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c\" (UID: \"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c\") " Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.822819 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/348c2ac5-4b5f-42c3-a4ab-e2576ebc099c-logs" (OuterVolumeSpecName: "logs") pod "348c2ac5-4b5f-42c3-a4ab-e2576ebc099c" (UID: "348c2ac5-4b5f-42c3-a4ab-e2576ebc099c"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.822896 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/348c2ac5-4b5f-42c3-a4ab-e2576ebc099c-config-data\") pod \"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c\" (UID: \"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c\") " Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.823561 4665 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/348c2ac5-4b5f-42c3-a4ab-e2576ebc099c-logs\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.834594 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/348c2ac5-4b5f-42c3-a4ab-e2576ebc099c-kube-api-access-vtz52" (OuterVolumeSpecName: "kube-api-access-vtz52") pod "348c2ac5-4b5f-42c3-a4ab-e2576ebc099c" (UID: "348c2ac5-4b5f-42c3-a4ab-e2576ebc099c"). InnerVolumeSpecName "kube-api-access-vtz52". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.842433 4665 generic.go:334] "Generic (PLEG): container finished" podID="348c2ac5-4b5f-42c3-a4ab-e2576ebc099c" containerID="72bbca08eb40ac138dd3dc6ffd52abcb83f0107d81141856e27be7a15868ae8d" exitCode=0 Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.842612 4665 generic.go:334] "Generic (PLEG): container finished" podID="348c2ac5-4b5f-42c3-a4ab-e2576ebc099c" containerID="9e89c4aa5b84ed91fd84dde1516b0684738846694bbd4fa1abfdbe4fc0a6f8a7" exitCode=143 Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.842851 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.843913 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c","Type":"ContainerDied","Data":"72bbca08eb40ac138dd3dc6ffd52abcb83f0107d81141856e27be7a15868ae8d"} Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.844057 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c","Type":"ContainerDied","Data":"9e89c4aa5b84ed91fd84dde1516b0684738846694bbd4fa1abfdbe4fc0a6f8a7"} Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.844159 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"348c2ac5-4b5f-42c3-a4ab-e2576ebc099c","Type":"ContainerDied","Data":"6379eed14817bb7da02f4a4a063dbb2734fa867c548db663a10f47ca940f577b"} Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.844259 4665 scope.go:117] "RemoveContainer" containerID="72bbca08eb40ac138dd3dc6ffd52abcb83f0107d81141856e27be7a15868ae8d" Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.848204 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d","Type":"ContainerStarted","Data":"129f5c35436fcdebdb055432deaea24b24431ee557e54060bb9112aa6ea0fd5b"} Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.868981 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/348c2ac5-4b5f-42c3-a4ab-e2576ebc099c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "348c2ac5-4b5f-42c3-a4ab-e2576ebc099c" (UID: "348c2ac5-4b5f-42c3-a4ab-e2576ebc099c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.883488 4665 scope.go:117] "RemoveContainer" containerID="9e89c4aa5b84ed91fd84dde1516b0684738846694bbd4fa1abfdbe4fc0a6f8a7" Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.894345 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/348c2ac5-4b5f-42c3-a4ab-e2576ebc099c-config-data" (OuterVolumeSpecName: "config-data") pod "348c2ac5-4b5f-42c3-a4ab-e2576ebc099c" (UID: "348c2ac5-4b5f-42c3-a4ab-e2576ebc099c"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.914723 4665 scope.go:117] "RemoveContainer" containerID="72bbca08eb40ac138dd3dc6ffd52abcb83f0107d81141856e27be7a15868ae8d" Dec 05 01:35:03 crc kubenswrapper[4665]: E1205 01:35:03.915113 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72bbca08eb40ac138dd3dc6ffd52abcb83f0107d81141856e27be7a15868ae8d\": container with ID starting with 72bbca08eb40ac138dd3dc6ffd52abcb83f0107d81141856e27be7a15868ae8d not found: ID does not exist" containerID="72bbca08eb40ac138dd3dc6ffd52abcb83f0107d81141856e27be7a15868ae8d" Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.915160 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72bbca08eb40ac138dd3dc6ffd52abcb83f0107d81141856e27be7a15868ae8d"} err="failed to get container status \"72bbca08eb40ac138dd3dc6ffd52abcb83f0107d81141856e27be7a15868ae8d\": rpc error: code = NotFound desc = could not find container \"72bbca08eb40ac138dd3dc6ffd52abcb83f0107d81141856e27be7a15868ae8d\": container with ID starting with 72bbca08eb40ac138dd3dc6ffd52abcb83f0107d81141856e27be7a15868ae8d not found: ID does not exist" Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.915185 4665 scope.go:117] "RemoveContainer" containerID="9e89c4aa5b84ed91fd84dde1516b0684738846694bbd4fa1abfdbe4fc0a6f8a7" Dec 05 01:35:03 crc kubenswrapper[4665]: E1205 01:35:03.915634 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9e89c4aa5b84ed91fd84dde1516b0684738846694bbd4fa1abfdbe4fc0a6f8a7\": container with ID starting with 9e89c4aa5b84ed91fd84dde1516b0684738846694bbd4fa1abfdbe4fc0a6f8a7 not found: ID does not exist" containerID="9e89c4aa5b84ed91fd84dde1516b0684738846694bbd4fa1abfdbe4fc0a6f8a7" Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.915665 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e89c4aa5b84ed91fd84dde1516b0684738846694bbd4fa1abfdbe4fc0a6f8a7"} err="failed to get container status \"9e89c4aa5b84ed91fd84dde1516b0684738846694bbd4fa1abfdbe4fc0a6f8a7\": rpc error: code = NotFound desc = could not find container \"9e89c4aa5b84ed91fd84dde1516b0684738846694bbd4fa1abfdbe4fc0a6f8a7\": container with ID starting with 9e89c4aa5b84ed91fd84dde1516b0684738846694bbd4fa1abfdbe4fc0a6f8a7 not found: ID does not exist" Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.915682 4665 scope.go:117] "RemoveContainer" containerID="72bbca08eb40ac138dd3dc6ffd52abcb83f0107d81141856e27be7a15868ae8d" Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.915906 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72bbca08eb40ac138dd3dc6ffd52abcb83f0107d81141856e27be7a15868ae8d"} err="failed to get container status \"72bbca08eb40ac138dd3dc6ffd52abcb83f0107d81141856e27be7a15868ae8d\": rpc error: code = NotFound desc = could not find container \"72bbca08eb40ac138dd3dc6ffd52abcb83f0107d81141856e27be7a15868ae8d\": container with ID starting with 72bbca08eb40ac138dd3dc6ffd52abcb83f0107d81141856e27be7a15868ae8d not found: ID does not exist" Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.915932 4665 scope.go:117] "RemoveContainer" containerID="9e89c4aa5b84ed91fd84dde1516b0684738846694bbd4fa1abfdbe4fc0a6f8a7" Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.916167 4665 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9e89c4aa5b84ed91fd84dde1516b0684738846694bbd4fa1abfdbe4fc0a6f8a7"} err="failed to get container status \"9e89c4aa5b84ed91fd84dde1516b0684738846694bbd4fa1abfdbe4fc0a6f8a7\": rpc error: code = NotFound desc = could not find container \"9e89c4aa5b84ed91fd84dde1516b0684738846694bbd4fa1abfdbe4fc0a6f8a7\": container with ID starting with 9e89c4aa5b84ed91fd84dde1516b0684738846694bbd4fa1abfdbe4fc0a6f8a7 not found: ID does not exist" Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.924199 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vtz52\" (UniqueName: \"kubernetes.io/projected/348c2ac5-4b5f-42c3-a4ab-e2576ebc099c-kube-api-access-vtz52\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.925173 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/348c2ac5-4b5f-42c3-a4ab-e2576ebc099c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:03 crc kubenswrapper[4665]: I1205 01:35:03.925251 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/348c2ac5-4b5f-42c3-a4ab-e2576ebc099c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.179338 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.186937 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.228933 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 01:35:04 crc kubenswrapper[4665]: E1205 01:35:04.229651 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="348c2ac5-4b5f-42c3-a4ab-e2576ebc099c" containerName="nova-metadata-metadata" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.229669 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="348c2ac5-4b5f-42c3-a4ab-e2576ebc099c" containerName="nova-metadata-metadata" Dec 05 01:35:04 crc kubenswrapper[4665]: E1205 01:35:04.229726 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="348c2ac5-4b5f-42c3-a4ab-e2576ebc099c" containerName="nova-metadata-log" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.229732 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="348c2ac5-4b5f-42c3-a4ab-e2576ebc099c" containerName="nova-metadata-log" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.230061 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="348c2ac5-4b5f-42c3-a4ab-e2576ebc099c" containerName="nova-metadata-metadata" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.230094 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="348c2ac5-4b5f-42c3-a4ab-e2576ebc099c" containerName="nova-metadata-log" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.231548 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.236879 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.251097 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.262843 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.280790 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.280823 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.311808 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.311850 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.313534 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.335118 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfb03a0e-eccb-4524-9ddb-a298064a6a24-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cfb03a0e-eccb-4524-9ddb-a298064a6a24\") " pod="openstack/nova-metadata-0" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.335913 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfb03a0e-eccb-4524-9ddb-a298064a6a24-config-data\") pod \"nova-metadata-0\" (UID: \"cfb03a0e-eccb-4524-9ddb-a298064a6a24\") " pod="openstack/nova-metadata-0" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.336048 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cfb03a0e-eccb-4524-9ddb-a298064a6a24-logs\") pod \"nova-metadata-0\" (UID: \"cfb03a0e-eccb-4524-9ddb-a298064a6a24\") " pod="openstack/nova-metadata-0" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.336130 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xppvt\" (UniqueName: \"kubernetes.io/projected/cfb03a0e-eccb-4524-9ddb-a298064a6a24-kube-api-access-xppvt\") pod \"nova-metadata-0\" (UID: \"cfb03a0e-eccb-4524-9ddb-a298064a6a24\") " pod="openstack/nova-metadata-0" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.336253 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfb03a0e-eccb-4524-9ddb-a298064a6a24-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"cfb03a0e-eccb-4524-9ddb-a298064a6a24\") " pod="openstack/nova-metadata-0" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.437467 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.437889 
4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfb03a0e-eccb-4524-9ddb-a298064a6a24-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cfb03a0e-eccb-4524-9ddb-a298064a6a24\") " pod="openstack/nova-metadata-0" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.437950 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfb03a0e-eccb-4524-9ddb-a298064a6a24-config-data\") pod \"nova-metadata-0\" (UID: \"cfb03a0e-eccb-4524-9ddb-a298064a6a24\") " pod="openstack/nova-metadata-0" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.438013 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cfb03a0e-eccb-4524-9ddb-a298064a6a24-logs\") pod \"nova-metadata-0\" (UID: \"cfb03a0e-eccb-4524-9ddb-a298064a6a24\") " pod="openstack/nova-metadata-0" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.438033 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xppvt\" (UniqueName: \"kubernetes.io/projected/cfb03a0e-eccb-4524-9ddb-a298064a6a24-kube-api-access-xppvt\") pod \"nova-metadata-0\" (UID: \"cfb03a0e-eccb-4524-9ddb-a298064a6a24\") " pod="openstack/nova-metadata-0" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.438091 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfb03a0e-eccb-4524-9ddb-a298064a6a24-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"cfb03a0e-eccb-4524-9ddb-a298064a6a24\") " pod="openstack/nova-metadata-0" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.439269 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cfb03a0e-eccb-4524-9ddb-a298064a6a24-logs\") pod \"nova-metadata-0\" (UID: \"cfb03a0e-eccb-4524-9ddb-a298064a6a24\") " pod="openstack/nova-metadata-0" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.443885 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfb03a0e-eccb-4524-9ddb-a298064a6a24-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"cfb03a0e-eccb-4524-9ddb-a298064a6a24\") " pod="openstack/nova-metadata-0" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.467802 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfb03a0e-eccb-4524-9ddb-a298064a6a24-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"cfb03a0e-eccb-4524-9ddb-a298064a6a24\") " pod="openstack/nova-metadata-0" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.467846 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfb03a0e-eccb-4524-9ddb-a298064a6a24-config-data\") pod \"nova-metadata-0\" (UID: \"cfb03a0e-eccb-4524-9ddb-a298064a6a24\") " pod="openstack/nova-metadata-0" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.501902 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xppvt\" (UniqueName: \"kubernetes.io/projected/cfb03a0e-eccb-4524-9ddb-a298064a6a24-kube-api-access-xppvt\") pod \"nova-metadata-0\" (UID: \"cfb03a0e-eccb-4524-9ddb-a298064a6a24\") " pod="openstack/nova-metadata-0" Dec 05 01:35:04 
crc kubenswrapper[4665]: I1205 01:35:04.564723 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-9nqnv"] Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.564981 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" podUID="7295fa1c-4087-4ae8-a38e-29edf3da8381" containerName="dnsmasq-dns" containerID="cri-o://02d04c8390c3a729b392254dd9073f799d10e34bd15cff5c73211b5c4e847bc1" gracePeriod=10 Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.598248 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.629437 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.962589 4665 generic.go:334] "Generic (PLEG): container finished" podID="7295fa1c-4087-4ae8-a38e-29edf3da8381" containerID="02d04c8390c3a729b392254dd9073f799d10e34bd15cff5c73211b5c4e847bc1" exitCode=0 Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.965505 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="348c2ac5-4b5f-42c3-a4ab-e2576ebc099c" path="/var/lib/kubelet/pods/348c2ac5-4b5f-42c3-a4ab-e2576ebc099c/volumes" Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.966206 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" event={"ID":"7295fa1c-4087-4ae8-a38e-29edf3da8381","Type":"ContainerDied","Data":"02d04c8390c3a729b392254dd9073f799d10e34bd15cff5c73211b5c4e847bc1"} Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.966241 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lxv5x"] Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.981608 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lxv5x"] Dec 05 01:35:04 crc kubenswrapper[4665]: I1205 01:35:04.981715 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lxv5x" Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.044544 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.169528 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtzmk\" (UniqueName: \"kubernetes.io/projected/381ce009-2870-44a4-a156-e181fc862e7b-kube-api-access-wtzmk\") pod \"redhat-operators-lxv5x\" (UID: \"381ce009-2870-44a4-a156-e181fc862e7b\") " pod="openshift-marketplace/redhat-operators-lxv5x" Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.169906 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/381ce009-2870-44a4-a156-e181fc862e7b-catalog-content\") pod \"redhat-operators-lxv5x\" (UID: \"381ce009-2870-44a4-a156-e181fc862e7b\") " pod="openshift-marketplace/redhat-operators-lxv5x" Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.170186 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/381ce009-2870-44a4-a156-e181fc862e7b-utilities\") pod \"redhat-operators-lxv5x\" (UID: \"381ce009-2870-44a4-a156-e181fc862e7b\") " pod="openshift-marketplace/redhat-operators-lxv5x" Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.213269 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.272504 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/381ce009-2870-44a4-a156-e181fc862e7b-catalog-content\") pod \"redhat-operators-lxv5x\" (UID: \"381ce009-2870-44a4-a156-e181fc862e7b\") " pod="openshift-marketplace/redhat-operators-lxv5x" Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.272631 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/381ce009-2870-44a4-a156-e181fc862e7b-utilities\") pod \"redhat-operators-lxv5x\" (UID: \"381ce009-2870-44a4-a156-e181fc862e7b\") " pod="openshift-marketplace/redhat-operators-lxv5x" Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.272773 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtzmk\" (UniqueName: \"kubernetes.io/projected/381ce009-2870-44a4-a156-e181fc862e7b-kube-api-access-wtzmk\") pod \"redhat-operators-lxv5x\" (UID: \"381ce009-2870-44a4-a156-e181fc862e7b\") " pod="openshift-marketplace/redhat-operators-lxv5x" Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.273104 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/381ce009-2870-44a4-a156-e181fc862e7b-catalog-content\") pod \"redhat-operators-lxv5x\" (UID: \"381ce009-2870-44a4-a156-e181fc862e7b\") " pod="openshift-marketplace/redhat-operators-lxv5x" Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.273690 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/381ce009-2870-44a4-a156-e181fc862e7b-utilities\") pod \"redhat-operators-lxv5x\" (UID: \"381ce009-2870-44a4-a156-e181fc862e7b\") " pod="openshift-marketplace/redhat-operators-lxv5x" 
Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.295052 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtzmk\" (UniqueName: \"kubernetes.io/projected/381ce009-2870-44a4-a156-e181fc862e7b-kube-api-access-wtzmk\") pod \"redhat-operators-lxv5x\" (UID: \"381ce009-2870-44a4-a156-e181fc862e7b\") " pod="openshift-marketplace/redhat-operators-lxv5x" Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.393894 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.189:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.394208 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.189:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.395877 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.506387 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lxv5x" Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.582738 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-ovsdbserver-nb\") pod \"7295fa1c-4087-4ae8-a38e-29edf3da8381\" (UID: \"7295fa1c-4087-4ae8-a38e-29edf3da8381\") " Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.583063 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-ovsdbserver-sb\") pod \"7295fa1c-4087-4ae8-a38e-29edf3da8381\" (UID: \"7295fa1c-4087-4ae8-a38e-29edf3da8381\") " Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.583104 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-dns-swift-storage-0\") pod \"7295fa1c-4087-4ae8-a38e-29edf3da8381\" (UID: \"7295fa1c-4087-4ae8-a38e-29edf3da8381\") " Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.583163 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-dns-svc\") pod \"7295fa1c-4087-4ae8-a38e-29edf3da8381\" (UID: \"7295fa1c-4087-4ae8-a38e-29edf3da8381\") " Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.583324 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ljz2l\" (UniqueName: \"kubernetes.io/projected/7295fa1c-4087-4ae8-a38e-29edf3da8381-kube-api-access-ljz2l\") pod \"7295fa1c-4087-4ae8-a38e-29edf3da8381\" (UID: \"7295fa1c-4087-4ae8-a38e-29edf3da8381\") " Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.594050 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-config\") pod 
\"7295fa1c-4087-4ae8-a38e-29edf3da8381\" (UID: \"7295fa1c-4087-4ae8-a38e-29edf3da8381\") " Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.594508 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7295fa1c-4087-4ae8-a38e-29edf3da8381-kube-api-access-ljz2l" (OuterVolumeSpecName: "kube-api-access-ljz2l") pod "7295fa1c-4087-4ae8-a38e-29edf3da8381" (UID: "7295fa1c-4087-4ae8-a38e-29edf3da8381"). InnerVolumeSpecName "kube-api-access-ljz2l". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.594755 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ljz2l\" (UniqueName: \"kubernetes.io/projected/7295fa1c-4087-4ae8-a38e-29edf3da8381-kube-api-access-ljz2l\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.753936 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-config" (OuterVolumeSpecName: "config") pod "7295fa1c-4087-4ae8-a38e-29edf3da8381" (UID: "7295fa1c-4087-4ae8-a38e-29edf3da8381"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.759663 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "7295fa1c-4087-4ae8-a38e-29edf3da8381" (UID: "7295fa1c-4087-4ae8-a38e-29edf3da8381"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.774874 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7295fa1c-4087-4ae8-a38e-29edf3da8381" (UID: "7295fa1c-4087-4ae8-a38e-29edf3da8381"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.805524 4665 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.805558 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.805573 4665 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.860035 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "7295fa1c-4087-4ae8-a38e-29edf3da8381" (UID: "7295fa1c-4087-4ae8-a38e-29edf3da8381"). InnerVolumeSpecName "dns-swift-storage-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.860655 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "7295fa1c-4087-4ae8-a38e-29edf3da8381" (UID: "7295fa1c-4087-4ae8-a38e-29edf3da8381"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.910950 4665 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:05 crc kubenswrapper[4665]: I1205 01:35:05.910976 4665 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/7295fa1c-4087-4ae8-a38e-29edf3da8381-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:06 crc kubenswrapper[4665]: I1205 01:35:06.000882 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cfb03a0e-eccb-4524-9ddb-a298064a6a24","Type":"ContainerStarted","Data":"69b582169d4db369fd62b6f83b2cd3930bc2f6a235862e8e85bdce0b58cf322d"} Dec 05 01:35:06 crc kubenswrapper[4665]: I1205 01:35:06.001224 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cfb03a0e-eccb-4524-9ddb-a298064a6a24","Type":"ContainerStarted","Data":"fa719e73cde3454ed7d8a98e86accb442732ca8243f1ebd1e4b1c3d895b35037"} Dec 05 01:35:06 crc kubenswrapper[4665]: I1205 01:35:06.005620 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d","Type":"ContainerStarted","Data":"a8ff0a907843b2b3e07fe311e34bad5f58de2a4ca73292d6e0263db79af950d6"} Dec 05 01:35:06 crc kubenswrapper[4665]: I1205 01:35:06.006771 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 01:35:06 crc kubenswrapper[4665]: I1205 01:35:06.009336 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" Dec 05 01:35:06 crc kubenswrapper[4665]: I1205 01:35:06.010089 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c9776ccc5-9nqnv" event={"ID":"7295fa1c-4087-4ae8-a38e-29edf3da8381","Type":"ContainerDied","Data":"658890844f8674ec454ea27ba3ffe079b12ccea50fd4645d53c8a477e644bbf8"} Dec 05 01:35:06 crc kubenswrapper[4665]: I1205 01:35:06.010146 4665 scope.go:117] "RemoveContainer" containerID="02d04c8390c3a729b392254dd9073f799d10e34bd15cff5c73211b5c4e847bc1" Dec 05 01:35:06 crc kubenswrapper[4665]: I1205 01:35:06.069791 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.880528533 podStartE2EDuration="10.069754199s" podCreationTimestamp="2025-12-05 01:34:56 +0000 UTC" firstStartedPulling="2025-12-05 01:34:57.801528116 +0000 UTC m=+1473.140920415" lastFinishedPulling="2025-12-05 01:35:04.990753782 +0000 UTC m=+1480.330146081" observedRunningTime="2025-12-05 01:35:06.041761791 +0000 UTC m=+1481.381154090" watchObservedRunningTime="2025-12-05 01:35:06.069754199 +0000 UTC m=+1481.409146498" Dec 05 01:35:06 crc kubenswrapper[4665]: I1205 01:35:06.084379 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-9nqnv"] Dec 05 01:35:06 crc kubenswrapper[4665]: I1205 01:35:06.101482 4665 scope.go:117] "RemoveContainer" containerID="02a7f65e33f49eae6044b2775ea7a60c718e98a854bbd72c9439c62762e196e3" Dec 05 01:35:06 crc kubenswrapper[4665]: I1205 01:35:06.120741 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c9776ccc5-9nqnv"] Dec 05 01:35:06 crc kubenswrapper[4665]: W1205 01:35:06.560221 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod381ce009_2870_44a4_a156_e181fc862e7b.slice/crio-f8130f7ad33cf102d15ea440990d9e563ba3acdf3df28237c68cb43a639225bc WatchSource:0}: Error finding container f8130f7ad33cf102d15ea440990d9e563ba3acdf3df28237c68cb43a639225bc: Status 404 returned error can't find the container with id f8130f7ad33cf102d15ea440990d9e563ba3acdf3df28237c68cb43a639225bc Dec 05 01:35:06 crc kubenswrapper[4665]: I1205 01:35:06.581636 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lxv5x"] Dec 05 01:35:06 crc kubenswrapper[4665]: I1205 01:35:06.904051 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7295fa1c-4087-4ae8-a38e-29edf3da8381" path="/var/lib/kubelet/pods/7295fa1c-4087-4ae8-a38e-29edf3da8381/volumes" Dec 05 01:35:07 crc kubenswrapper[4665]: I1205 01:35:07.028328 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cfb03a0e-eccb-4524-9ddb-a298064a6a24","Type":"ContainerStarted","Data":"9aac4c47098066a987a5d796cfb96fed9eb8fd995c7b55310a7e97b084abc373"} Dec 05 01:35:07 crc kubenswrapper[4665]: I1205 01:35:07.033656 4665 generic.go:334] "Generic (PLEG): container finished" podID="381ce009-2870-44a4-a156-e181fc862e7b" containerID="b9d47277ee1e4696518d66007521c04e344ecd4891abc24d51d0a35582debc7a" exitCode=0 Dec 05 01:35:07 crc kubenswrapper[4665]: I1205 01:35:07.033756 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lxv5x" event={"ID":"381ce009-2870-44a4-a156-e181fc862e7b","Type":"ContainerDied","Data":"b9d47277ee1e4696518d66007521c04e344ecd4891abc24d51d0a35582debc7a"} Dec 05 01:35:07 crc kubenswrapper[4665]: 
I1205 01:35:07.033781 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lxv5x" event={"ID":"381ce009-2870-44a4-a156-e181fc862e7b","Type":"ContainerStarted","Data":"f8130f7ad33cf102d15ea440990d9e563ba3acdf3df28237c68cb43a639225bc"} Dec 05 01:35:07 crc kubenswrapper[4665]: I1205 01:35:07.053703 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=3.053683855 podStartE2EDuration="3.053683855s" podCreationTimestamp="2025-12-05 01:35:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:35:07.047240559 +0000 UTC m=+1482.386632858" watchObservedRunningTime="2025-12-05 01:35:07.053683855 +0000 UTC m=+1482.393076154" Dec 05 01:35:08 crc kubenswrapper[4665]: I1205 01:35:08.048532 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lxv5x" event={"ID":"381ce009-2870-44a4-a156-e181fc862e7b","Type":"ContainerStarted","Data":"2d1f9361a4e3b113fa0fbc5d57017afc1e73fc603050341a0056dc4059a5e8d7"} Dec 05 01:35:09 crc kubenswrapper[4665]: I1205 01:35:09.598650 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 01:35:09 crc kubenswrapper[4665]: I1205 01:35:09.600006 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 01:35:10 crc kubenswrapper[4665]: I1205 01:35:10.083392 4665 generic.go:334] "Generic (PLEG): container finished" podID="56a40c18-4bbe-4897-96ab-532ffb565321" containerID="dffed07b73eb3577e3c4ba61d5e18d3b7b2328bda7e18e4fae8d6773e88002ad" exitCode=0 Dec 05 01:35:10 crc kubenswrapper[4665]: I1205 01:35:10.085202 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-tdhhv" event={"ID":"56a40c18-4bbe-4897-96ab-532ffb565321","Type":"ContainerDied","Data":"dffed07b73eb3577e3c4ba61d5e18d3b7b2328bda7e18e4fae8d6773e88002ad"} Dec 05 01:35:11 crc kubenswrapper[4665]: I1205 01:35:11.689632 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-tdhhv" Dec 05 01:35:11 crc kubenswrapper[4665]: I1205 01:35:11.834119 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56a40c18-4bbe-4897-96ab-532ffb565321-config-data\") pod \"56a40c18-4bbe-4897-96ab-532ffb565321\" (UID: \"56a40c18-4bbe-4897-96ab-532ffb565321\") " Dec 05 01:35:11 crc kubenswrapper[4665]: I1205 01:35:11.834194 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jz5gw\" (UniqueName: \"kubernetes.io/projected/56a40c18-4bbe-4897-96ab-532ffb565321-kube-api-access-jz5gw\") pod \"56a40c18-4bbe-4897-96ab-532ffb565321\" (UID: \"56a40c18-4bbe-4897-96ab-532ffb565321\") " Dec 05 01:35:11 crc kubenswrapper[4665]: I1205 01:35:11.834381 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56a40c18-4bbe-4897-96ab-532ffb565321-combined-ca-bundle\") pod \"56a40c18-4bbe-4897-96ab-532ffb565321\" (UID: \"56a40c18-4bbe-4897-96ab-532ffb565321\") " Dec 05 01:35:11 crc kubenswrapper[4665]: I1205 01:35:11.834740 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56a40c18-4bbe-4897-96ab-532ffb565321-scripts\") pod \"56a40c18-4bbe-4897-96ab-532ffb565321\" (UID: \"56a40c18-4bbe-4897-96ab-532ffb565321\") " Dec 05 01:35:11 crc kubenswrapper[4665]: I1205 01:35:11.852694 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56a40c18-4bbe-4897-96ab-532ffb565321-scripts" (OuterVolumeSpecName: "scripts") pod "56a40c18-4bbe-4897-96ab-532ffb565321" (UID: "56a40c18-4bbe-4897-96ab-532ffb565321"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:11 crc kubenswrapper[4665]: I1205 01:35:11.852890 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/56a40c18-4bbe-4897-96ab-532ffb565321-kube-api-access-jz5gw" (OuterVolumeSpecName: "kube-api-access-jz5gw") pod "56a40c18-4bbe-4897-96ab-532ffb565321" (UID: "56a40c18-4bbe-4897-96ab-532ffb565321"). InnerVolumeSpecName "kube-api-access-jz5gw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:35:11 crc kubenswrapper[4665]: I1205 01:35:11.870514 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56a40c18-4bbe-4897-96ab-532ffb565321-config-data" (OuterVolumeSpecName: "config-data") pod "56a40c18-4bbe-4897-96ab-532ffb565321" (UID: "56a40c18-4bbe-4897-96ab-532ffb565321"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:11 crc kubenswrapper[4665]: I1205 01:35:11.887960 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/56a40c18-4bbe-4897-96ab-532ffb565321-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "56a40c18-4bbe-4897-96ab-532ffb565321" (UID: "56a40c18-4bbe-4897-96ab-532ffb565321"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:11 crc kubenswrapper[4665]: I1205 01:35:11.937627 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/56a40c18-4bbe-4897-96ab-532ffb565321-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:11 crc kubenswrapper[4665]: I1205 01:35:11.937840 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56a40c18-4bbe-4897-96ab-532ffb565321-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:11 crc kubenswrapper[4665]: I1205 01:35:11.937967 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jz5gw\" (UniqueName: \"kubernetes.io/projected/56a40c18-4bbe-4897-96ab-532ffb565321-kube-api-access-jz5gw\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:11 crc kubenswrapper[4665]: I1205 01:35:11.938092 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56a40c18-4bbe-4897-96ab-532ffb565321-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:12 crc kubenswrapper[4665]: I1205 01:35:12.102908 4665 generic.go:334] "Generic (PLEG): container finished" podID="381ce009-2870-44a4-a156-e181fc862e7b" containerID="2d1f9361a4e3b113fa0fbc5d57017afc1e73fc603050341a0056dc4059a5e8d7" exitCode=0 Dec 05 01:35:12 crc kubenswrapper[4665]: I1205 01:35:12.102971 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lxv5x" event={"ID":"381ce009-2870-44a4-a156-e181fc862e7b","Type":"ContainerDied","Data":"2d1f9361a4e3b113fa0fbc5d57017afc1e73fc603050341a0056dc4059a5e8d7"} Dec 05 01:35:12 crc kubenswrapper[4665]: I1205 01:35:12.107100 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-tdhhv" event={"ID":"56a40c18-4bbe-4897-96ab-532ffb565321","Type":"ContainerDied","Data":"0235f49de574c0322c2723969ae380fb7ad992941f7bbd6a07821bfa79d68e3e"} Dec 05 01:35:12 crc kubenswrapper[4665]: I1205 01:35:12.107146 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0235f49de574c0322c2723969ae380fb7ad992941f7bbd6a07821bfa79d68e3e" Dec 05 01:35:12 crc kubenswrapper[4665]: I1205 01:35:12.107206 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-cell-mapping-tdhhv" Dec 05 01:35:12 crc kubenswrapper[4665]: I1205 01:35:12.301342 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 01:35:12 crc kubenswrapper[4665]: I1205 01:35:12.301808 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8" containerName="nova-api-log" containerID="cri-o://293d123617a77cadd2d207403a86d70bf0e613a462a2275f2e0f45327c40cc56" gracePeriod=30 Dec 05 01:35:12 crc kubenswrapper[4665]: I1205 01:35:12.301895 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8" containerName="nova-api-api" containerID="cri-o://9821f8dc0effcbcc2c0601af1813de6a2acced24cc7372c5a47515f8f166d06b" gracePeriod=30 Dec 05 01:35:12 crc kubenswrapper[4665]: I1205 01:35:12.323810 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 01:35:12 crc kubenswrapper[4665]: I1205 01:35:12.324130 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="e35dc66b-8013-4cd5-96d3-a3d8f0faa070" containerName="nova-scheduler-scheduler" containerID="cri-o://021896abc8f23dabdc9772fbcb9e94f32f063d97f0fed27653691c9ab1fa1031" gracePeriod=30 Dec 05 01:35:12 crc kubenswrapper[4665]: I1205 01:35:12.350007 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 01:35:12 crc kubenswrapper[4665]: I1205 01:35:12.350278 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="cfb03a0e-eccb-4524-9ddb-a298064a6a24" containerName="nova-metadata-log" containerID="cri-o://69b582169d4db369fd62b6f83b2cd3930bc2f6a235862e8e85bdce0b58cf322d" gracePeriod=30 Dec 05 01:35:12 crc kubenswrapper[4665]: I1205 01:35:12.350789 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="cfb03a0e-eccb-4524-9ddb-a298064a6a24" containerName="nova-metadata-metadata" containerID="cri-o://9aac4c47098066a987a5d796cfb96fed9eb8fd995c7b55310a7e97b084abc373" gracePeriod=30 Dec 05 01:35:13 crc kubenswrapper[4665]: I1205 01:35:13.127028 4665 generic.go:334] "Generic (PLEG): container finished" podID="f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8" containerID="293d123617a77cadd2d207403a86d70bf0e613a462a2275f2e0f45327c40cc56" exitCode=143 Dec 05 01:35:13 crc kubenswrapper[4665]: I1205 01:35:13.127378 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8","Type":"ContainerDied","Data":"293d123617a77cadd2d207403a86d70bf0e613a462a2275f2e0f45327c40cc56"} Dec 05 01:35:13 crc kubenswrapper[4665]: I1205 01:35:13.152000 4665 generic.go:334] "Generic (PLEG): container finished" podID="cfb03a0e-eccb-4524-9ddb-a298064a6a24" containerID="9aac4c47098066a987a5d796cfb96fed9eb8fd995c7b55310a7e97b084abc373" exitCode=0 Dec 05 01:35:13 crc kubenswrapper[4665]: I1205 01:35:13.152031 4665 generic.go:334] "Generic (PLEG): container finished" podID="cfb03a0e-eccb-4524-9ddb-a298064a6a24" containerID="69b582169d4db369fd62b6f83b2cd3930bc2f6a235862e8e85bdce0b58cf322d" exitCode=143 Dec 05 01:35:13 crc kubenswrapper[4665]: I1205 01:35:13.152104 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" 
event={"ID":"cfb03a0e-eccb-4524-9ddb-a298064a6a24","Type":"ContainerDied","Data":"9aac4c47098066a987a5d796cfb96fed9eb8fd995c7b55310a7e97b084abc373"} Dec 05 01:35:13 crc kubenswrapper[4665]: I1205 01:35:13.152130 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cfb03a0e-eccb-4524-9ddb-a298064a6a24","Type":"ContainerDied","Data":"69b582169d4db369fd62b6f83b2cd3930bc2f6a235862e8e85bdce0b58cf322d"} Dec 05 01:35:13 crc kubenswrapper[4665]: I1205 01:35:13.159664 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lxv5x" event={"ID":"381ce009-2870-44a4-a156-e181fc862e7b","Type":"ContainerStarted","Data":"cb05433370f15b8e44318128a550f7439d65e8a962b0c6ddd5190b56644f587d"} Dec 05 01:35:13 crc kubenswrapper[4665]: I1205 01:35:13.209929 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lxv5x" podStartSLOduration=3.732635203 podStartE2EDuration="9.209911217s" podCreationTimestamp="2025-12-05 01:35:04 +0000 UTC" firstStartedPulling="2025-12-05 01:35:07.035944986 +0000 UTC m=+1482.375337285" lastFinishedPulling="2025-12-05 01:35:12.513221 +0000 UTC m=+1487.852613299" observedRunningTime="2025-12-05 01:35:13.205314436 +0000 UTC m=+1488.544706735" watchObservedRunningTime="2025-12-05 01:35:13.209911217 +0000 UTC m=+1488.549303516" Dec 05 01:35:13 crc kubenswrapper[4665]: I1205 01:35:13.507605 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 01:35:13 crc kubenswrapper[4665]: I1205 01:35:13.570279 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xppvt\" (UniqueName: \"kubernetes.io/projected/cfb03a0e-eccb-4524-9ddb-a298064a6a24-kube-api-access-xppvt\") pod \"cfb03a0e-eccb-4524-9ddb-a298064a6a24\" (UID: \"cfb03a0e-eccb-4524-9ddb-a298064a6a24\") " Dec 05 01:35:13 crc kubenswrapper[4665]: I1205 01:35:13.570410 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfb03a0e-eccb-4524-9ddb-a298064a6a24-config-data\") pod \"cfb03a0e-eccb-4524-9ddb-a298064a6a24\" (UID: \"cfb03a0e-eccb-4524-9ddb-a298064a6a24\") " Dec 05 01:35:13 crc kubenswrapper[4665]: I1205 01:35:13.570530 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfb03a0e-eccb-4524-9ddb-a298064a6a24-nova-metadata-tls-certs\") pod \"cfb03a0e-eccb-4524-9ddb-a298064a6a24\" (UID: \"cfb03a0e-eccb-4524-9ddb-a298064a6a24\") " Dec 05 01:35:13 crc kubenswrapper[4665]: I1205 01:35:13.570580 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cfb03a0e-eccb-4524-9ddb-a298064a6a24-logs\") pod \"cfb03a0e-eccb-4524-9ddb-a298064a6a24\" (UID: \"cfb03a0e-eccb-4524-9ddb-a298064a6a24\") " Dec 05 01:35:13 crc kubenswrapper[4665]: I1205 01:35:13.570631 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfb03a0e-eccb-4524-9ddb-a298064a6a24-combined-ca-bundle\") pod \"cfb03a0e-eccb-4524-9ddb-a298064a6a24\" (UID: \"cfb03a0e-eccb-4524-9ddb-a298064a6a24\") " Dec 05 01:35:13 crc kubenswrapper[4665]: I1205 01:35:13.571606 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/empty-dir/cfb03a0e-eccb-4524-9ddb-a298064a6a24-logs" (OuterVolumeSpecName: "logs") pod "cfb03a0e-eccb-4524-9ddb-a298064a6a24" (UID: "cfb03a0e-eccb-4524-9ddb-a298064a6a24"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:35:13 crc kubenswrapper[4665]: I1205 01:35:13.579202 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cfb03a0e-eccb-4524-9ddb-a298064a6a24-kube-api-access-xppvt" (OuterVolumeSpecName: "kube-api-access-xppvt") pod "cfb03a0e-eccb-4524-9ddb-a298064a6a24" (UID: "cfb03a0e-eccb-4524-9ddb-a298064a6a24"). InnerVolumeSpecName "kube-api-access-xppvt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:35:13 crc kubenswrapper[4665]: I1205 01:35:13.610722 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfb03a0e-eccb-4524-9ddb-a298064a6a24-config-data" (OuterVolumeSpecName: "config-data") pod "cfb03a0e-eccb-4524-9ddb-a298064a6a24" (UID: "cfb03a0e-eccb-4524-9ddb-a298064a6a24"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:13 crc kubenswrapper[4665]: I1205 01:35:13.644422 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfb03a0e-eccb-4524-9ddb-a298064a6a24-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cfb03a0e-eccb-4524-9ddb-a298064a6a24" (UID: "cfb03a0e-eccb-4524-9ddb-a298064a6a24"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:13 crc kubenswrapper[4665]: I1205 01:35:13.673506 4665 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/cfb03a0e-eccb-4524-9ddb-a298064a6a24-logs\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:13 crc kubenswrapper[4665]: I1205 01:35:13.673541 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cfb03a0e-eccb-4524-9ddb-a298064a6a24-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:13 crc kubenswrapper[4665]: I1205 01:35:13.673557 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xppvt\" (UniqueName: \"kubernetes.io/projected/cfb03a0e-eccb-4524-9ddb-a298064a6a24-kube-api-access-xppvt\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:13 crc kubenswrapper[4665]: I1205 01:35:13.673567 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cfb03a0e-eccb-4524-9ddb-a298064a6a24-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:13 crc kubenswrapper[4665]: I1205 01:35:13.688850 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cfb03a0e-eccb-4524-9ddb-a298064a6a24-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "cfb03a0e-eccb-4524-9ddb-a298064a6a24" (UID: "cfb03a0e-eccb-4524-9ddb-a298064a6a24"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:13 crc kubenswrapper[4665]: I1205 01:35:13.775424 4665 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/cfb03a0e-eccb-4524-9ddb-a298064a6a24-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.171804 4665 generic.go:334] "Generic (PLEG): container finished" podID="cffe2874-1254-4832-943d-59419d486859" containerID="d32d4c60f5a286b86e07ae60fbe95e3d9a21a73670e77f52117a1dc477781641" exitCode=0 Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.171875 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-lqq2r" event={"ID":"cffe2874-1254-4832-943d-59419d486859","Type":"ContainerDied","Data":"d32d4c60f5a286b86e07ae60fbe95e3d9a21a73670e77f52117a1dc477781641"} Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.175421 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"cfb03a0e-eccb-4524-9ddb-a298064a6a24","Type":"ContainerDied","Data":"fa719e73cde3454ed7d8a98e86accb442732ca8243f1ebd1e4b1c3d895b35037"} Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.175467 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.175473 4665 scope.go:117] "RemoveContainer" containerID="9aac4c47098066a987a5d796cfb96fed9eb8fd995c7b55310a7e97b084abc373" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.206683 4665 scope.go:117] "RemoveContainer" containerID="69b582169d4db369fd62b6f83b2cd3930bc2f6a235862e8e85bdce0b58cf322d" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.228344 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.240076 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.255362 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 01:35:14 crc kubenswrapper[4665]: E1205 01:35:14.255822 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfb03a0e-eccb-4524-9ddb-a298064a6a24" containerName="nova-metadata-metadata" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.255842 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfb03a0e-eccb-4524-9ddb-a298064a6a24" containerName="nova-metadata-metadata" Dec 05 01:35:14 crc kubenswrapper[4665]: E1205 01:35:14.255857 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="56a40c18-4bbe-4897-96ab-532ffb565321" containerName="nova-manage" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.255863 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="56a40c18-4bbe-4897-96ab-532ffb565321" containerName="nova-manage" Dec 05 01:35:14 crc kubenswrapper[4665]: E1205 01:35:14.255897 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7295fa1c-4087-4ae8-a38e-29edf3da8381" containerName="init" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.255903 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="7295fa1c-4087-4ae8-a38e-29edf3da8381" containerName="init" Dec 05 01:35:14 crc kubenswrapper[4665]: E1205 01:35:14.255920 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cfb03a0e-eccb-4524-9ddb-a298064a6a24" 
containerName="nova-metadata-log" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.255926 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="cfb03a0e-eccb-4524-9ddb-a298064a6a24" containerName="nova-metadata-log" Dec 05 01:35:14 crc kubenswrapper[4665]: E1205 01:35:14.255941 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7295fa1c-4087-4ae8-a38e-29edf3da8381" containerName="dnsmasq-dns" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.255947 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="7295fa1c-4087-4ae8-a38e-29edf3da8381" containerName="dnsmasq-dns" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.256113 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfb03a0e-eccb-4524-9ddb-a298064a6a24" containerName="nova-metadata-metadata" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.256125 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="7295fa1c-4087-4ae8-a38e-29edf3da8381" containerName="dnsmasq-dns" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.256140 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="cfb03a0e-eccb-4524-9ddb-a298064a6a24" containerName="nova-metadata-log" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.256159 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="56a40c18-4bbe-4897-96ab-532ffb565321" containerName="nova-manage" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.257178 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.260996 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.261320 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.264068 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.285592 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e\") " pod="openstack/nova-metadata-0" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.285661 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nm674\" (UniqueName: \"kubernetes.io/projected/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-kube-api-access-nm674\") pod \"nova-metadata-0\" (UID: \"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e\") " pod="openstack/nova-metadata-0" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.285751 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-logs\") pod \"nova-metadata-0\" (UID: \"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e\") " pod="openstack/nova-metadata-0" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.285822 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-config-data\") pod \"nova-metadata-0\" (UID: 
\"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e\") " pod="openstack/nova-metadata-0" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.285885 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e\") " pod="openstack/nova-metadata-0" Dec 05 01:35:14 crc kubenswrapper[4665]: E1205 01:35:14.291366 4665 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="021896abc8f23dabdc9772fbcb9e94f32f063d97f0fed27653691c9ab1fa1031" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 01:35:14 crc kubenswrapper[4665]: E1205 01:35:14.293056 4665 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="021896abc8f23dabdc9772fbcb9e94f32f063d97f0fed27653691c9ab1fa1031" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 01:35:14 crc kubenswrapper[4665]: E1205 01:35:14.300634 4665 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="021896abc8f23dabdc9772fbcb9e94f32f063d97f0fed27653691c9ab1fa1031" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 01:35:14 crc kubenswrapper[4665]: E1205 01:35:14.300704 4665 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="e35dc66b-8013-4cd5-96d3-a3d8f0faa070" containerName="nova-scheduler-scheduler" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.388423 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e\") " pod="openstack/nova-metadata-0" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.388489 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nm674\" (UniqueName: \"kubernetes.io/projected/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-kube-api-access-nm674\") pod \"nova-metadata-0\" (UID: \"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e\") " pod="openstack/nova-metadata-0" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.388580 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-logs\") pod \"nova-metadata-0\" (UID: \"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e\") " pod="openstack/nova-metadata-0" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.388641 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-config-data\") pod \"nova-metadata-0\" (UID: \"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e\") " pod="openstack/nova-metadata-0" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.388699 4665 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e\") " pod="openstack/nova-metadata-0" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.394458 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e\") " pod="openstack/nova-metadata-0" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.396871 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e\") " pod="openstack/nova-metadata-0" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.397345 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-logs\") pod \"nova-metadata-0\" (UID: \"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e\") " pod="openstack/nova-metadata-0" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.397802 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-config-data\") pod \"nova-metadata-0\" (UID: \"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e\") " pod="openstack/nova-metadata-0" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.433486 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nm674\" (UniqueName: \"kubernetes.io/projected/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-kube-api-access-nm674\") pod \"nova-metadata-0\" (UID: \"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e\") " pod="openstack/nova-metadata-0" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.570963 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 01:35:14 crc kubenswrapper[4665]: I1205 01:35:14.907831 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cfb03a0e-eccb-4524-9ddb-a298064a6a24" path="/var/lib/kubelet/pods/cfb03a0e-eccb-4524-9ddb-a298064a6a24/volumes" Dec 05 01:35:15 crc kubenswrapper[4665]: I1205 01:35:15.085662 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 01:35:15 crc kubenswrapper[4665]: I1205 01:35:15.198341 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e","Type":"ContainerStarted","Data":"b6e164a9bdacd7ee5cc443da1139fc14e62dfb9dd6077caed1b393c9627fd623"} Dec 05 01:35:15 crc kubenswrapper[4665]: I1205 01:35:15.511546 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-lxv5x" Dec 05 01:35:15 crc kubenswrapper[4665]: I1205 01:35:15.511839 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lxv5x" Dec 05 01:35:15 crc kubenswrapper[4665]: I1205 01:35:15.706533 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-lqq2r" Dec 05 01:35:15 crc kubenswrapper[4665]: I1205 01:35:15.824672 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cffe2874-1254-4832-943d-59419d486859-config-data\") pod \"cffe2874-1254-4832-943d-59419d486859\" (UID: \"cffe2874-1254-4832-943d-59419d486859\") " Dec 05 01:35:15 crc kubenswrapper[4665]: I1205 01:35:15.825235 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rwzsl\" (UniqueName: \"kubernetes.io/projected/cffe2874-1254-4832-943d-59419d486859-kube-api-access-rwzsl\") pod \"cffe2874-1254-4832-943d-59419d486859\" (UID: \"cffe2874-1254-4832-943d-59419d486859\") " Dec 05 01:35:15 crc kubenswrapper[4665]: I1205 01:35:15.825457 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cffe2874-1254-4832-943d-59419d486859-scripts\") pod \"cffe2874-1254-4832-943d-59419d486859\" (UID: \"cffe2874-1254-4832-943d-59419d486859\") " Dec 05 01:35:15 crc kubenswrapper[4665]: I1205 01:35:15.825514 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cffe2874-1254-4832-943d-59419d486859-combined-ca-bundle\") pod \"cffe2874-1254-4832-943d-59419d486859\" (UID: \"cffe2874-1254-4832-943d-59419d486859\") " Dec 05 01:35:15 crc kubenswrapper[4665]: I1205 01:35:15.843217 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cffe2874-1254-4832-943d-59419d486859-scripts" (OuterVolumeSpecName: "scripts") pod "cffe2874-1254-4832-943d-59419d486859" (UID: "cffe2874-1254-4832-943d-59419d486859"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:15 crc kubenswrapper[4665]: I1205 01:35:15.853127 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cffe2874-1254-4832-943d-59419d486859-kube-api-access-rwzsl" (OuterVolumeSpecName: "kube-api-access-rwzsl") pod "cffe2874-1254-4832-943d-59419d486859" (UID: "cffe2874-1254-4832-943d-59419d486859"). InnerVolumeSpecName "kube-api-access-rwzsl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:35:15 crc kubenswrapper[4665]: I1205 01:35:15.877157 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cffe2874-1254-4832-943d-59419d486859-config-data" (OuterVolumeSpecName: "config-data") pod "cffe2874-1254-4832-943d-59419d486859" (UID: "cffe2874-1254-4832-943d-59419d486859"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:15 crc kubenswrapper[4665]: I1205 01:35:15.880895 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cffe2874-1254-4832-943d-59419d486859-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "cffe2874-1254-4832-943d-59419d486859" (UID: "cffe2874-1254-4832-943d-59419d486859"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:15 crc kubenswrapper[4665]: I1205 01:35:15.933918 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rwzsl\" (UniqueName: \"kubernetes.io/projected/cffe2874-1254-4832-943d-59419d486859-kube-api-access-rwzsl\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:15 crc kubenswrapper[4665]: I1205 01:35:15.933954 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/cffe2874-1254-4832-943d-59419d486859-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:15 crc kubenswrapper[4665]: I1205 01:35:15.933965 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/cffe2874-1254-4832-943d-59419d486859-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:15 crc kubenswrapper[4665]: I1205 01:35:15.933973 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/cffe2874-1254-4832-943d-59419d486859-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.207731 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-lqq2r" event={"ID":"cffe2874-1254-4832-943d-59419d486859","Type":"ContainerDied","Data":"0d1c8d889dc5e54d49082a852922862f6686cfcc38ef65e60dc129b1223034d8"} Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.207767 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0d1c8d889dc5e54d49082a852922862f6686cfcc38ef65e60dc129b1223034d8" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.207831 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-lqq2r" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.213974 4665 generic.go:334] "Generic (PLEG): container finished" podID="f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8" containerID="9821f8dc0effcbcc2c0601af1813de6a2acced24cc7372c5a47515f8f166d06b" exitCode=0 Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.214056 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8","Type":"ContainerDied","Data":"9821f8dc0effcbcc2c0601af1813de6a2acced24cc7372c5a47515f8f166d06b"} Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.216129 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.220497 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e","Type":"ContainerStarted","Data":"d200baa5912d28a3b0b2bfbe61180d0fa92b71d068b63cd871bdb0547f64cb7d"} Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.220535 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e","Type":"ContainerStarted","Data":"0cd27e04f20931086f739ed9c91cba0917f0db4c4aea8c01005071e83405f977"} Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.280440 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.280417439 podStartE2EDuration="2.280417439s" podCreationTimestamp="2025-12-05 01:35:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:35:16.278637976 +0000 UTC m=+1491.618030275" watchObservedRunningTime="2025-12-05 01:35:16.280417439 +0000 UTC m=+1491.619809738" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.309241 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 01:35:16 crc kubenswrapper[4665]: E1205 01:35:16.310280 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8" containerName="nova-api-log" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.310316 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8" containerName="nova-api-log" Dec 05 01:35:16 crc kubenswrapper[4665]: E1205 01:35:16.310333 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8" containerName="nova-api-api" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.310340 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8" containerName="nova-api-api" Dec 05 01:35:16 crc kubenswrapper[4665]: E1205 01:35:16.310350 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cffe2874-1254-4832-943d-59419d486859" containerName="nova-cell1-conductor-db-sync" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.310357 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="cffe2874-1254-4832-943d-59419d486859" containerName="nova-cell1-conductor-db-sync" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.310571 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="cffe2874-1254-4832-943d-59419d486859" containerName="nova-cell1-conductor-db-sync" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.310594 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8" containerName="nova-api-api" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.310613 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8" containerName="nova-api-log" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.311254 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.315899 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.342913 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8-config-data\") pod \"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8\" (UID: \"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8\") " Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.343265 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8-logs\") pod \"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8\" (UID: \"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8\") " Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.343289 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8-combined-ca-bundle\") pod \"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8\" (UID: \"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8\") " Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.343378 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jw6g4\" (UniqueName: \"kubernetes.io/projected/f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8-kube-api-access-jw6g4\") pod \"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8\" (UID: \"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8\") " Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.343868 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/632d0371-eca3-499e-8993-07e8025175d3-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"632d0371-eca3-499e-8993-07e8025175d3\") " pod="openstack/nova-cell1-conductor-0" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.343914 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/632d0371-eca3-499e-8993-07e8025175d3-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"632d0371-eca3-499e-8993-07e8025175d3\") " pod="openstack/nova-cell1-conductor-0" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.343960 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-drjlz\" (UniqueName: \"kubernetes.io/projected/632d0371-eca3-499e-8993-07e8025175d3-kube-api-access-drjlz\") pod \"nova-cell1-conductor-0\" (UID: \"632d0371-eca3-499e-8993-07e8025175d3\") " pod="openstack/nova-cell1-conductor-0" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.345086 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8-logs" (OuterVolumeSpecName: "logs") pod "f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8" (UID: "f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.346103 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.355252 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8-kube-api-access-jw6g4" (OuterVolumeSpecName: "kube-api-access-jw6g4") pod "f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8" (UID: "f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8"). InnerVolumeSpecName "kube-api-access-jw6g4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.391556 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8" (UID: "f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.426388 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8-config-data" (OuterVolumeSpecName: "config-data") pod "f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8" (UID: "f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.446916 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/632d0371-eca3-499e-8993-07e8025175d3-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"632d0371-eca3-499e-8993-07e8025175d3\") " pod="openstack/nova-cell1-conductor-0" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.446991 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-drjlz\" (UniqueName: \"kubernetes.io/projected/632d0371-eca3-499e-8993-07e8025175d3-kube-api-access-drjlz\") pod \"nova-cell1-conductor-0\" (UID: \"632d0371-eca3-499e-8993-07e8025175d3\") " pod="openstack/nova-cell1-conductor-0" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.447100 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/632d0371-eca3-499e-8993-07e8025175d3-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"632d0371-eca3-499e-8993-07e8025175d3\") " pod="openstack/nova-cell1-conductor-0" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.447147 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jw6g4\" (UniqueName: \"kubernetes.io/projected/f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8-kube-api-access-jw6g4\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.447160 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.447169 4665 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8-logs\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.447177 4665 
reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.451476 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/632d0371-eca3-499e-8993-07e8025175d3-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"632d0371-eca3-499e-8993-07e8025175d3\") " pod="openstack/nova-cell1-conductor-0" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.451523 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/632d0371-eca3-499e-8993-07e8025175d3-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"632d0371-eca3-499e-8993-07e8025175d3\") " pod="openstack/nova-cell1-conductor-0" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.466827 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-drjlz\" (UniqueName: \"kubernetes.io/projected/632d0371-eca3-499e-8993-07e8025175d3-kube-api-access-drjlz\") pod \"nova-cell1-conductor-0\" (UID: \"632d0371-eca3-499e-8993-07e8025175d3\") " pod="openstack/nova-cell1-conductor-0" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.581806 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-lxv5x" podUID="381ce009-2870-44a4-a156-e181fc862e7b" containerName="registry-server" probeResult="failure" output=< Dec 05 01:35:16 crc kubenswrapper[4665]: timeout: failed to connect service ":50051" within 1s Dec 05 01:35:16 crc kubenswrapper[4665]: > Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.643003 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.778016 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.854585 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v55cr\" (UniqueName: \"kubernetes.io/projected/e35dc66b-8013-4cd5-96d3-a3d8f0faa070-kube-api-access-v55cr\") pod \"e35dc66b-8013-4cd5-96d3-a3d8f0faa070\" (UID: \"e35dc66b-8013-4cd5-96d3-a3d8f0faa070\") " Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.854986 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e35dc66b-8013-4cd5-96d3-a3d8f0faa070-config-data\") pod \"e35dc66b-8013-4cd5-96d3-a3d8f0faa070\" (UID: \"e35dc66b-8013-4cd5-96d3-a3d8f0faa070\") " Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.855097 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e35dc66b-8013-4cd5-96d3-a3d8f0faa070-combined-ca-bundle\") pod \"e35dc66b-8013-4cd5-96d3-a3d8f0faa070\" (UID: \"e35dc66b-8013-4cd5-96d3-a3d8f0faa070\") " Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.862578 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e35dc66b-8013-4cd5-96d3-a3d8f0faa070-kube-api-access-v55cr" (OuterVolumeSpecName: "kube-api-access-v55cr") pod "e35dc66b-8013-4cd5-96d3-a3d8f0faa070" (UID: "e35dc66b-8013-4cd5-96d3-a3d8f0faa070"). 
InnerVolumeSpecName "kube-api-access-v55cr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.902601 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e35dc66b-8013-4cd5-96d3-a3d8f0faa070-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e35dc66b-8013-4cd5-96d3-a3d8f0faa070" (UID: "e35dc66b-8013-4cd5-96d3-a3d8f0faa070"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.941534 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e35dc66b-8013-4cd5-96d3-a3d8f0faa070-config-data" (OuterVolumeSpecName: "config-data") pod "e35dc66b-8013-4cd5-96d3-a3d8f0faa070" (UID: "e35dc66b-8013-4cd5-96d3-a3d8f0faa070"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.959273 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v55cr\" (UniqueName: \"kubernetes.io/projected/e35dc66b-8013-4cd5-96d3-a3d8f0faa070-kube-api-access-v55cr\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.959537 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e35dc66b-8013-4cd5-96d3-a3d8f0faa070-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:16 crc kubenswrapper[4665]: I1205 01:35:16.959640 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e35dc66b-8013-4cd5-96d3-a3d8f0faa070-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.137265 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"] Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.231983 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"632d0371-eca3-499e-8993-07e8025175d3","Type":"ContainerStarted","Data":"28d81e865d0753c5058a4c464010619f0632f4122ce826c92762591bb1622156"} Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.234232 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8","Type":"ContainerDied","Data":"9c95f25d39c6486e6981090c8f87fecde0d6d16162d74f48d6cca1d3723c6a89"} Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.234261 4665 scope.go:117] "RemoveContainer" containerID="9821f8dc0effcbcc2c0601af1813de6a2acced24cc7372c5a47515f8f166d06b" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.234409 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.261227 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.265084 4665 generic.go:334] "Generic (PLEG): container finished" podID="e35dc66b-8013-4cd5-96d3-a3d8f0faa070" containerID="021896abc8f23dabdc9772fbcb9e94f32f063d97f0fed27653691c9ab1fa1031" exitCode=0 Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.265176 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.265237 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e35dc66b-8013-4cd5-96d3-a3d8f0faa070","Type":"ContainerDied","Data":"021896abc8f23dabdc9772fbcb9e94f32f063d97f0fed27653691c9ab1fa1031"} Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.265271 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"e35dc66b-8013-4cd5-96d3-a3d8f0faa070","Type":"ContainerDied","Data":"21429af4ae47a6de87cba8e1eb6b132129df1309e0b317441701536a4fd55dee"} Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.274636 4665 scope.go:117] "RemoveContainer" containerID="293d123617a77cadd2d207403a86d70bf0e613a462a2275f2e0f45327c40cc56" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.293522 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.317151 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 01:35:17 crc kubenswrapper[4665]: E1205 01:35:17.317540 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e35dc66b-8013-4cd5-96d3-a3d8f0faa070" containerName="nova-scheduler-scheduler" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.317556 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="e35dc66b-8013-4cd5-96d3-a3d8f0faa070" containerName="nova-scheduler-scheduler" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.317758 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="e35dc66b-8013-4cd5-96d3-a3d8f0faa070" containerName="nova-scheduler-scheduler" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.332074 4665 scope.go:117] "RemoveContainer" containerID="021896abc8f23dabdc9772fbcb9e94f32f063d97f0fed27653691c9ab1fa1031" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.341894 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.342009 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.348331 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.365930 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4fabc12-4da5-427a-abfd-ba88f51e7a8c-logs\") pod \"nova-api-0\" (UID: \"d4fabc12-4da5-427a-abfd-ba88f51e7a8c\") " pod="openstack/nova-api-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.366017 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4fabc12-4da5-427a-abfd-ba88f51e7a8c-config-data\") pod \"nova-api-0\" (UID: \"d4fabc12-4da5-427a-abfd-ba88f51e7a8c\") " pod="openstack/nova-api-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.366062 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gkw2\" (UniqueName: \"kubernetes.io/projected/d4fabc12-4da5-427a-abfd-ba88f51e7a8c-kube-api-access-7gkw2\") pod \"nova-api-0\" (UID: \"d4fabc12-4da5-427a-abfd-ba88f51e7a8c\") " pod="openstack/nova-api-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.366226 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4fabc12-4da5-427a-abfd-ba88f51e7a8c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d4fabc12-4da5-427a-abfd-ba88f51e7a8c\") " pod="openstack/nova-api-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.376389 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.380084 4665 scope.go:117] "RemoveContainer" containerID="021896abc8f23dabdc9772fbcb9e94f32f063d97f0fed27653691c9ab1fa1031" Dec 05 01:35:17 crc kubenswrapper[4665]: E1205 01:35:17.380527 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"021896abc8f23dabdc9772fbcb9e94f32f063d97f0fed27653691c9ab1fa1031\": container with ID starting with 021896abc8f23dabdc9772fbcb9e94f32f063d97f0fed27653691c9ab1fa1031 not found: ID does not exist" containerID="021896abc8f23dabdc9772fbcb9e94f32f063d97f0fed27653691c9ab1fa1031" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.380554 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"021896abc8f23dabdc9772fbcb9e94f32f063d97f0fed27653691c9ab1fa1031"} err="failed to get container status \"021896abc8f23dabdc9772fbcb9e94f32f063d97f0fed27653691c9ab1fa1031\": rpc error: code = NotFound desc = could not find container \"021896abc8f23dabdc9772fbcb9e94f32f063d97f0fed27653691c9ab1fa1031\": container with ID starting with 021896abc8f23dabdc9772fbcb9e94f32f063d97f0fed27653691c9ab1fa1031 not found: ID does not exist" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.383913 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.394046 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.395279 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.399737 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.417822 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.468016 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gkw2\" (UniqueName: \"kubernetes.io/projected/d4fabc12-4da5-427a-abfd-ba88f51e7a8c-kube-api-access-7gkw2\") pod \"nova-api-0\" (UID: \"d4fabc12-4da5-427a-abfd-ba88f51e7a8c\") " pod="openstack/nova-api-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.468174 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4fabc12-4da5-427a-abfd-ba88f51e7a8c-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"d4fabc12-4da5-427a-abfd-ba88f51e7a8c\") " pod="openstack/nova-api-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.468230 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bc8f290-c6fe-47d5-b94b-d547a2eeadc1-config-data\") pod \"nova-scheduler-0\" (UID: \"7bc8f290-c6fe-47d5-b94b-d547a2eeadc1\") " pod="openstack/nova-scheduler-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.468269 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-96fs6\" (UniqueName: \"kubernetes.io/projected/7bc8f290-c6fe-47d5-b94b-d547a2eeadc1-kube-api-access-96fs6\") pod \"nova-scheduler-0\" (UID: \"7bc8f290-c6fe-47d5-b94b-d547a2eeadc1\") " pod="openstack/nova-scheduler-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.468317 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4fabc12-4da5-427a-abfd-ba88f51e7a8c-logs\") pod \"nova-api-0\" (UID: \"d4fabc12-4da5-427a-abfd-ba88f51e7a8c\") " pod="openstack/nova-api-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.468363 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bc8f290-c6fe-47d5-b94b-d547a2eeadc1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7bc8f290-c6fe-47d5-b94b-d547a2eeadc1\") " pod="openstack/nova-scheduler-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.468382 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4fabc12-4da5-427a-abfd-ba88f51e7a8c-config-data\") pod \"nova-api-0\" (UID: \"d4fabc12-4da5-427a-abfd-ba88f51e7a8c\") " pod="openstack/nova-api-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.469260 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4fabc12-4da5-427a-abfd-ba88f51e7a8c-logs\") pod \"nova-api-0\" (UID: \"d4fabc12-4da5-427a-abfd-ba88f51e7a8c\") " pod="openstack/nova-api-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.472441 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4fabc12-4da5-427a-abfd-ba88f51e7a8c-combined-ca-bundle\") pod \"nova-api-0\" (UID: 
\"d4fabc12-4da5-427a-abfd-ba88f51e7a8c\") " pod="openstack/nova-api-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.473526 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4fabc12-4da5-427a-abfd-ba88f51e7a8c-config-data\") pod \"nova-api-0\" (UID: \"d4fabc12-4da5-427a-abfd-ba88f51e7a8c\") " pod="openstack/nova-api-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.486763 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gkw2\" (UniqueName: \"kubernetes.io/projected/d4fabc12-4da5-427a-abfd-ba88f51e7a8c-kube-api-access-7gkw2\") pod \"nova-api-0\" (UID: \"d4fabc12-4da5-427a-abfd-ba88f51e7a8c\") " pod="openstack/nova-api-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.570479 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bc8f290-c6fe-47d5-b94b-d547a2eeadc1-config-data\") pod \"nova-scheduler-0\" (UID: \"7bc8f290-c6fe-47d5-b94b-d547a2eeadc1\") " pod="openstack/nova-scheduler-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.570561 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-96fs6\" (UniqueName: \"kubernetes.io/projected/7bc8f290-c6fe-47d5-b94b-d547a2eeadc1-kube-api-access-96fs6\") pod \"nova-scheduler-0\" (UID: \"7bc8f290-c6fe-47d5-b94b-d547a2eeadc1\") " pod="openstack/nova-scheduler-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.570620 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bc8f290-c6fe-47d5-b94b-d547a2eeadc1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7bc8f290-c6fe-47d5-b94b-d547a2eeadc1\") " pod="openstack/nova-scheduler-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.573758 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bc8f290-c6fe-47d5-b94b-d547a2eeadc1-config-data\") pod \"nova-scheduler-0\" (UID: \"7bc8f290-c6fe-47d5-b94b-d547a2eeadc1\") " pod="openstack/nova-scheduler-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.574172 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bc8f290-c6fe-47d5-b94b-d547a2eeadc1-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"7bc8f290-c6fe-47d5-b94b-d547a2eeadc1\") " pod="openstack/nova-scheduler-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.589448 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-96fs6\" (UniqueName: \"kubernetes.io/projected/7bc8f290-c6fe-47d5-b94b-d547a2eeadc1-kube-api-access-96fs6\") pod \"nova-scheduler-0\" (UID: \"7bc8f290-c6fe-47d5-b94b-d547a2eeadc1\") " pod="openstack/nova-scheduler-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.667611 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 01:35:17 crc kubenswrapper[4665]: I1205 01:35:17.719081 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 01:35:18 crc kubenswrapper[4665]: I1205 01:35:18.159785 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 01:35:18 crc kubenswrapper[4665]: I1205 01:35:18.280005 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d4fabc12-4da5-427a-abfd-ba88f51e7a8c","Type":"ContainerStarted","Data":"cec56c5d4292676dfd683c205fbdd1ffa17b330cf99999c20b06ac2f4cd7ff4f"} Dec 05 01:35:18 crc kubenswrapper[4665]: I1205 01:35:18.282708 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"632d0371-eca3-499e-8993-07e8025175d3","Type":"ContainerStarted","Data":"833ec943f9972ad31d28e3906dea6caa8fbe7b4c93c96a682ec08235f18a96da"} Dec 05 01:35:18 crc kubenswrapper[4665]: I1205 01:35:18.282800 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0" Dec 05 01:35:18 crc kubenswrapper[4665]: I1205 01:35:18.303578 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 01:35:18 crc kubenswrapper[4665]: I1205 01:35:18.317513 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.317495813 podStartE2EDuration="2.317495813s" podCreationTimestamp="2025-12-05 01:35:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:35:18.304469708 +0000 UTC m=+1493.643862017" watchObservedRunningTime="2025-12-05 01:35:18.317495813 +0000 UTC m=+1493.656888112" Dec 05 01:35:18 crc kubenswrapper[4665]: I1205 01:35:18.903413 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e35dc66b-8013-4cd5-96d3-a3d8f0faa070" path="/var/lib/kubelet/pods/e35dc66b-8013-4cd5-96d3-a3d8f0faa070/volumes" Dec 05 01:35:18 crc kubenswrapper[4665]: I1205 01:35:18.904259 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8" path="/var/lib/kubelet/pods/f6fd7ae5-dbdd-41d5-9aa6-d61aa6abe8b8/volumes" Dec 05 01:35:19 crc kubenswrapper[4665]: I1205 01:35:19.293749 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7bc8f290-c6fe-47d5-b94b-d547a2eeadc1","Type":"ContainerStarted","Data":"4dd73a6067b50ed9b2f4fb1c28ce6adedffdf8d9f663bd7a1bfa80ca1f9864f0"} Dec 05 01:35:19 crc kubenswrapper[4665]: I1205 01:35:19.293797 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7bc8f290-c6fe-47d5-b94b-d547a2eeadc1","Type":"ContainerStarted","Data":"113d3c927fd1f4547b326532f4b08a9d1abd3bbb984688b51a5f2d9a49c7dc42"} Dec 05 01:35:19 crc kubenswrapper[4665]: I1205 01:35:19.296546 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d4fabc12-4da5-427a-abfd-ba88f51e7a8c","Type":"ContainerStarted","Data":"a2fb8f1e5e460aea986d8778a7bf595a82333086196e093d698832233ccd4399"} Dec 05 01:35:19 crc kubenswrapper[4665]: I1205 01:35:19.296587 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d4fabc12-4da5-427a-abfd-ba88f51e7a8c","Type":"ContainerStarted","Data":"239452dc173bfce15e6fe2916b7d9607504d625d7833c2a02709aaf2b8d9138d"} Dec 05 01:35:19 crc kubenswrapper[4665]: I1205 01:35:19.322979 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack/nova-scheduler-0" podStartSLOduration=2.322961017 podStartE2EDuration="2.322961017s" podCreationTimestamp="2025-12-05 01:35:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:35:19.315129808 +0000 UTC m=+1494.654522117" watchObservedRunningTime="2025-12-05 01:35:19.322961017 +0000 UTC m=+1494.662353316" Dec 05 01:35:19 crc kubenswrapper[4665]: I1205 01:35:19.348993 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.348969486 podStartE2EDuration="2.348969486s" podCreationTimestamp="2025-12-05 01:35:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:35:19.338754339 +0000 UTC m=+1494.678146638" watchObservedRunningTime="2025-12-05 01:35:19.348969486 +0000 UTC m=+1494.688361785" Dec 05 01:35:19 crc kubenswrapper[4665]: I1205 01:35:19.571918 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 01:35:19 crc kubenswrapper[4665]: I1205 01:35:19.572192 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 01:35:22 crc kubenswrapper[4665]: I1205 01:35:22.720046 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 01:35:24 crc kubenswrapper[4665]: I1205 01:35:24.571286 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 01:35:24 crc kubenswrapper[4665]: I1205 01:35:24.571633 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 01:35:25 crc kubenswrapper[4665]: I1205 01:35:25.586413 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="7d2986ca-e8d1-4199-ac7b-3ea31432eb9e" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.197:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 01:35:25 crc kubenswrapper[4665]: I1205 01:35:25.586422 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="7d2986ca-e8d1-4199-ac7b-3ea31432eb9e" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.197:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 01:35:26 crc kubenswrapper[4665]: I1205 01:35:26.563687 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-lxv5x" podUID="381ce009-2870-44a4-a156-e181fc862e7b" containerName="registry-server" probeResult="failure" output=< Dec 05 01:35:26 crc kubenswrapper[4665]: timeout: failed to connect service ":50051" within 1s Dec 05 01:35:26 crc kubenswrapper[4665]: > Dec 05 01:35:26 crc kubenswrapper[4665]: I1205 01:35:26.676922 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 05 01:35:27 crc kubenswrapper[4665]: I1205 01:35:27.219013 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 01:35:27 crc kubenswrapper[4665]: I1205 01:35:27.668011 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 01:35:27 crc kubenswrapper[4665]: I1205 01:35:27.668070 4665 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 01:35:27 crc kubenswrapper[4665]: I1205 01:35:27.719866 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 01:35:27 crc kubenswrapper[4665]: I1205 01:35:27.753223 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 01:35:28 crc kubenswrapper[4665]: I1205 01:35:28.397065 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 01:35:28 crc kubenswrapper[4665]: I1205 01:35:28.750496 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d4fabc12-4da5-427a-abfd-ba88f51e7a8c" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.199:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 01:35:28 crc kubenswrapper[4665]: I1205 01:35:28.750580 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="d4fabc12-4da5-427a-abfd-ba88f51e7a8c" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.199:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.166212 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.251943 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16c94c27-cb84-4662-b838-9dd308b4eabb-combined-ca-bundle\") pod \"16c94c27-cb84-4662-b838-9dd308b4eabb\" (UID: \"16c94c27-cb84-4662-b838-9dd308b4eabb\") " Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.252093 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16c94c27-cb84-4662-b838-9dd308b4eabb-config-data\") pod \"16c94c27-cb84-4662-b838-9dd308b4eabb\" (UID: \"16c94c27-cb84-4662-b838-9dd308b4eabb\") " Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.252185 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-smm9x\" (UniqueName: \"kubernetes.io/projected/16c94c27-cb84-4662-b838-9dd308b4eabb-kube-api-access-smm9x\") pod \"16c94c27-cb84-4662-b838-9dd308b4eabb\" (UID: \"16c94c27-cb84-4662-b838-9dd308b4eabb\") " Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.258258 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16c94c27-cb84-4662-b838-9dd308b4eabb-kube-api-access-smm9x" (OuterVolumeSpecName: "kube-api-access-smm9x") pod "16c94c27-cb84-4662-b838-9dd308b4eabb" (UID: "16c94c27-cb84-4662-b838-9dd308b4eabb"). InnerVolumeSpecName "kube-api-access-smm9x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.286099 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16c94c27-cb84-4662-b838-9dd308b4eabb-config-data" (OuterVolumeSpecName: "config-data") pod "16c94c27-cb84-4662-b838-9dd308b4eabb" (UID: "16c94c27-cb84-4662-b838-9dd308b4eabb"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.304709 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16c94c27-cb84-4662-b838-9dd308b4eabb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "16c94c27-cb84-4662-b838-9dd308b4eabb" (UID: "16c94c27-cb84-4662-b838-9dd308b4eabb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.354282 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/16c94c27-cb84-4662-b838-9dd308b4eabb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.354335 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/16c94c27-cb84-4662-b838-9dd308b4eabb-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.354346 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-smm9x\" (UniqueName: \"kubernetes.io/projected/16c94c27-cb84-4662-b838-9dd308b4eabb-kube-api-access-smm9x\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.414994 4665 generic.go:334] "Generic (PLEG): container finished" podID="16c94c27-cb84-4662-b838-9dd308b4eabb" containerID="a0d36f35a5202b3f7b7ddf03aec638446e490febb742b9d0ed62a0975886c9bc" exitCode=137 Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.415073 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.415108 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"16c94c27-cb84-4662-b838-9dd308b4eabb","Type":"ContainerDied","Data":"a0d36f35a5202b3f7b7ddf03aec638446e490febb742b9d0ed62a0975886c9bc"} Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.415473 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"16c94c27-cb84-4662-b838-9dd308b4eabb","Type":"ContainerDied","Data":"4e5785efc55077f5efd8a957de67c54ce7bd26eaf7b6cced43b9098bc74bfc00"} Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.415499 4665 scope.go:117] "RemoveContainer" containerID="a0d36f35a5202b3f7b7ddf03aec638446e490febb742b9d0ed62a0975886c9bc" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.437110 4665 scope.go:117] "RemoveContainer" containerID="a0d36f35a5202b3f7b7ddf03aec638446e490febb742b9d0ed62a0975886c9bc" Dec 05 01:35:32 crc kubenswrapper[4665]: E1205 01:35:32.437901 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a0d36f35a5202b3f7b7ddf03aec638446e490febb742b9d0ed62a0975886c9bc\": container with ID starting with a0d36f35a5202b3f7b7ddf03aec638446e490febb742b9d0ed62a0975886c9bc not found: ID does not exist" containerID="a0d36f35a5202b3f7b7ddf03aec638446e490febb742b9d0ed62a0975886c9bc" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.437935 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a0d36f35a5202b3f7b7ddf03aec638446e490febb742b9d0ed62a0975886c9bc"} err="failed to get container status \"a0d36f35a5202b3f7b7ddf03aec638446e490febb742b9d0ed62a0975886c9bc\": rpc error: code = 
NotFound desc = could not find container \"a0d36f35a5202b3f7b7ddf03aec638446e490febb742b9d0ed62a0975886c9bc\": container with ID starting with a0d36f35a5202b3f7b7ddf03aec638446e490febb742b9d0ed62a0975886c9bc not found: ID does not exist" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.454043 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.463388 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.477457 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 01:35:32 crc kubenswrapper[4665]: E1205 01:35:32.478193 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16c94c27-cb84-4662-b838-9dd308b4eabb" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.478353 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="16c94c27-cb84-4662-b838-9dd308b4eabb" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.478702 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="16c94c27-cb84-4662-b838-9dd308b4eabb" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.479611 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.482034 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.482269 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.482347 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.498367 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.556586 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/a32e8029-fd7e-4662-8fbe-b83deaea60c8-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a32e8029-fd7e-4662-8fbe-b83deaea60c8\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.556838 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/a32e8029-fd7e-4662-8fbe-b83deaea60c8-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a32e8029-fd7e-4662-8fbe-b83deaea60c8\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.557034 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a32e8029-fd7e-4662-8fbe-b83deaea60c8-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a32e8029-fd7e-4662-8fbe-b83deaea60c8\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.557152 4665 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a32e8029-fd7e-4662-8fbe-b83deaea60c8-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a32e8029-fd7e-4662-8fbe-b83deaea60c8\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.557257 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdqfl\" (UniqueName: \"kubernetes.io/projected/a32e8029-fd7e-4662-8fbe-b83deaea60c8-kube-api-access-tdqfl\") pod \"nova-cell1-novncproxy-0\" (UID: \"a32e8029-fd7e-4662-8fbe-b83deaea60c8\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.659420 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a32e8029-fd7e-4662-8fbe-b83deaea60c8-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a32e8029-fd7e-4662-8fbe-b83deaea60c8\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.659471 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a32e8029-fd7e-4662-8fbe-b83deaea60c8-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a32e8029-fd7e-4662-8fbe-b83deaea60c8\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.659494 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdqfl\" (UniqueName: \"kubernetes.io/projected/a32e8029-fd7e-4662-8fbe-b83deaea60c8-kube-api-access-tdqfl\") pod \"nova-cell1-novncproxy-0\" (UID: \"a32e8029-fd7e-4662-8fbe-b83deaea60c8\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.659565 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/a32e8029-fd7e-4662-8fbe-b83deaea60c8-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a32e8029-fd7e-4662-8fbe-b83deaea60c8\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.659586 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/a32e8029-fd7e-4662-8fbe-b83deaea60c8-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a32e8029-fd7e-4662-8fbe-b83deaea60c8\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.664014 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/a32e8029-fd7e-4662-8fbe-b83deaea60c8-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a32e8029-fd7e-4662-8fbe-b83deaea60c8\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.664272 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a32e8029-fd7e-4662-8fbe-b83deaea60c8-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"a32e8029-fd7e-4662-8fbe-b83deaea60c8\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.664679 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"config-data\" (UniqueName: \"kubernetes.io/secret/a32e8029-fd7e-4662-8fbe-b83deaea60c8-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"a32e8029-fd7e-4662-8fbe-b83deaea60c8\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.664866 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/a32e8029-fd7e-4662-8fbe-b83deaea60c8-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"a32e8029-fd7e-4662-8fbe-b83deaea60c8\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.676890 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdqfl\" (UniqueName: \"kubernetes.io/projected/a32e8029-fd7e-4662-8fbe-b83deaea60c8-kube-api-access-tdqfl\") pod \"nova-cell1-novncproxy-0\" (UID: \"a32e8029-fd7e-4662-8fbe-b83deaea60c8\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.802838 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:35:32 crc kubenswrapper[4665]: I1205 01:35:32.910803 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="16c94c27-cb84-4662-b838-9dd308b4eabb" path="/var/lib/kubelet/pods/16c94c27-cb84-4662-b838-9dd308b4eabb/volumes" Dec 05 01:35:33 crc kubenswrapper[4665]: I1205 01:35:33.263574 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 01:35:33 crc kubenswrapper[4665]: I1205 01:35:33.435515 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a32e8029-fd7e-4662-8fbe-b83deaea60c8","Type":"ContainerStarted","Data":"76a3c8426733da053eabdbcc403e99628692769240a2ccbc24e4981a572fd142"} Dec 05 01:35:34 crc kubenswrapper[4665]: I1205 01:35:34.453882 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"a32e8029-fd7e-4662-8fbe-b83deaea60c8","Type":"ContainerStarted","Data":"8762d4f744cc64a07417efa6ad79b8ba16a6e8b89622a7e1209f351f3c630f53"} Dec 05 01:35:34 crc kubenswrapper[4665]: I1205 01:35:34.474813 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=2.474796589 podStartE2EDuration="2.474796589s" podCreationTimestamp="2025-12-05 01:35:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:35:34.468089467 +0000 UTC m=+1509.807481786" watchObservedRunningTime="2025-12-05 01:35:34.474796589 +0000 UTC m=+1509.814188888" Dec 05 01:35:34 crc kubenswrapper[4665]: I1205 01:35:34.576578 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 01:35:34 crc kubenswrapper[4665]: I1205 01:35:34.578118 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 01:35:34 crc kubenswrapper[4665]: I1205 01:35:34.583860 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 01:35:35 crc kubenswrapper[4665]: I1205 01:35:35.471135 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 01:35:35 crc kubenswrapper[4665]: I1205 01:35:35.611393 4665 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lxv5x" Dec 05 01:35:35 crc kubenswrapper[4665]: I1205 01:35:35.671019 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lxv5x" Dec 05 01:35:36 crc kubenswrapper[4665]: I1205 01:35:36.071653 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lxv5x"] Dec 05 01:35:37 crc kubenswrapper[4665]: I1205 01:35:37.481211 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lxv5x" podUID="381ce009-2870-44a4-a156-e181fc862e7b" containerName="registry-server" containerID="cri-o://cb05433370f15b8e44318128a550f7439d65e8a962b0c6ddd5190b56644f587d" gracePeriod=2 Dec 05 01:35:38 crc kubenswrapper[4665]: I1205 01:35:38.455692 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:35:38 crc kubenswrapper[4665]: I1205 01:35:38.512572 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 01:35:38 crc kubenswrapper[4665]: I1205 01:35:38.513639 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 01:35:38 crc kubenswrapper[4665]: I1205 01:35:38.543512 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 01:35:38 crc kubenswrapper[4665]: I1205 01:35:38.550712 4665 generic.go:334] "Generic (PLEG): container finished" podID="381ce009-2870-44a4-a156-e181fc862e7b" containerID="cb05433370f15b8e44318128a550f7439d65e8a962b0c6ddd5190b56644f587d" exitCode=0 Dec 05 01:35:38 crc kubenswrapper[4665]: I1205 01:35:38.550768 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lxv5x" event={"ID":"381ce009-2870-44a4-a156-e181fc862e7b","Type":"ContainerDied","Data":"cb05433370f15b8e44318128a550f7439d65e8a962b0c6ddd5190b56644f587d"} Dec 05 01:35:38 crc kubenswrapper[4665]: I1205 01:35:38.561759 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 01:35:38 crc kubenswrapper[4665]: I1205 01:35:38.956963 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lxv5x" Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.048526 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtzmk\" (UniqueName: \"kubernetes.io/projected/381ce009-2870-44a4-a156-e181fc862e7b-kube-api-access-wtzmk\") pod \"381ce009-2870-44a4-a156-e181fc862e7b\" (UID: \"381ce009-2870-44a4-a156-e181fc862e7b\") " Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.048569 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/381ce009-2870-44a4-a156-e181fc862e7b-catalog-content\") pod \"381ce009-2870-44a4-a156-e181fc862e7b\" (UID: \"381ce009-2870-44a4-a156-e181fc862e7b\") " Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.048713 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/381ce009-2870-44a4-a156-e181fc862e7b-utilities\") pod \"381ce009-2870-44a4-a156-e181fc862e7b\" (UID: \"381ce009-2870-44a4-a156-e181fc862e7b\") " Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.050089 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/381ce009-2870-44a4-a156-e181fc862e7b-utilities" (OuterVolumeSpecName: "utilities") pod "381ce009-2870-44a4-a156-e181fc862e7b" (UID: "381ce009-2870-44a4-a156-e181fc862e7b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.055904 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/381ce009-2870-44a4-a156-e181fc862e7b-kube-api-access-wtzmk" (OuterVolumeSpecName: "kube-api-access-wtzmk") pod "381ce009-2870-44a4-a156-e181fc862e7b" (UID: "381ce009-2870-44a4-a156-e181fc862e7b"). InnerVolumeSpecName "kube-api-access-wtzmk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.152873 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtzmk\" (UniqueName: \"kubernetes.io/projected/381ce009-2870-44a4-a156-e181fc862e7b-kube-api-access-wtzmk\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.152920 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/381ce009-2870-44a4-a156-e181fc862e7b-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.163469 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/381ce009-2870-44a4-a156-e181fc862e7b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "381ce009-2870-44a4-a156-e181fc862e7b" (UID: "381ce009-2870-44a4-a156-e181fc862e7b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.255025 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/381ce009-2870-44a4-a156-e181fc862e7b-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.561899 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lxv5x" event={"ID":"381ce009-2870-44a4-a156-e181fc862e7b","Type":"ContainerDied","Data":"f8130f7ad33cf102d15ea440990d9e563ba3acdf3df28237c68cb43a639225bc"} Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.562256 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.562278 4665 scope.go:117] "RemoveContainer" containerID="cb05433370f15b8e44318128a550f7439d65e8a962b0c6ddd5190b56644f587d" Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.561948 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lxv5x" Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.574473 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.590236 4665 scope.go:117] "RemoveContainer" containerID="2d1f9361a4e3b113fa0fbc5d57017afc1e73fc603050341a0056dc4059a5e8d7" Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.635523 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lxv5x"] Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.648304 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lxv5x"] Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.653487 4665 scope.go:117] "RemoveContainer" containerID="b9d47277ee1e4696518d66007521c04e344ecd4891abc24d51d0a35582debc7a" Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.808582 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-7bn8l"] Dec 05 01:35:39 crc kubenswrapper[4665]: E1205 01:35:39.808963 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="381ce009-2870-44a4-a156-e181fc862e7b" containerName="extract-content" Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.808979 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="381ce009-2870-44a4-a156-e181fc862e7b" containerName="extract-content" Dec 05 01:35:39 crc kubenswrapper[4665]: E1205 01:35:39.809001 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="381ce009-2870-44a4-a156-e181fc862e7b" containerName="extract-utilities" Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.809008 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="381ce009-2870-44a4-a156-e181fc862e7b" containerName="extract-utilities" Dec 05 01:35:39 crc kubenswrapper[4665]: E1205 01:35:39.809025 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="381ce009-2870-44a4-a156-e181fc862e7b" containerName="registry-server" Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.809031 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="381ce009-2870-44a4-a156-e181fc862e7b" containerName="registry-server" Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.811516 4665 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="381ce009-2870-44a4-a156-e181fc862e7b" containerName="registry-server" Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.812542 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.866972 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-7bn8l"] Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.977389 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-7bn8l\" (UID: \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\") " pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.977443 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-7bn8l\" (UID: \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\") " pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.977498 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-config\") pod \"dnsmasq-dns-89c5cd4d5-7bn8l\" (UID: \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\") " pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.977531 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkfjq\" (UniqueName: \"kubernetes.io/projected/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-kube-api-access-dkfjq\") pod \"dnsmasq-dns-89c5cd4d5-7bn8l\" (UID: \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\") " pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.977568 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-7bn8l\" (UID: \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\") " pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" Dec 05 01:35:39 crc kubenswrapper[4665]: I1205 01:35:39.977717 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-7bn8l\" (UID: \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\") " pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" Dec 05 01:35:40 crc kubenswrapper[4665]: I1205 01:35:40.079905 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-7bn8l\" (UID: \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\") " pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" Dec 05 01:35:40 crc kubenswrapper[4665]: I1205 01:35:40.079961 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-7bn8l\" (UID: 
\"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\") " pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" Dec 05 01:35:40 crc kubenswrapper[4665]: I1205 01:35:40.079987 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-7bn8l\" (UID: \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\") " pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" Dec 05 01:35:40 crc kubenswrapper[4665]: I1205 01:35:40.080019 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-config\") pod \"dnsmasq-dns-89c5cd4d5-7bn8l\" (UID: \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\") " pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" Dec 05 01:35:40 crc kubenswrapper[4665]: I1205 01:35:40.080042 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkfjq\" (UniqueName: \"kubernetes.io/projected/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-kube-api-access-dkfjq\") pod \"dnsmasq-dns-89c5cd4d5-7bn8l\" (UID: \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\") " pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" Dec 05 01:35:40 crc kubenswrapper[4665]: I1205 01:35:40.080067 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-7bn8l\" (UID: \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\") " pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" Dec 05 01:35:40 crc kubenswrapper[4665]: I1205 01:35:40.081141 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-dns-swift-storage-0\") pod \"dnsmasq-dns-89c5cd4d5-7bn8l\" (UID: \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\") " pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" Dec 05 01:35:40 crc kubenswrapper[4665]: I1205 01:35:40.081154 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-ovsdbserver-nb\") pod \"dnsmasq-dns-89c5cd4d5-7bn8l\" (UID: \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\") " pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" Dec 05 01:35:40 crc kubenswrapper[4665]: I1205 01:35:40.081154 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-ovsdbserver-sb\") pod \"dnsmasq-dns-89c5cd4d5-7bn8l\" (UID: \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\") " pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" Dec 05 01:35:40 crc kubenswrapper[4665]: I1205 01:35:40.081224 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-config\") pod \"dnsmasq-dns-89c5cd4d5-7bn8l\" (UID: \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\") " pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" Dec 05 01:35:40 crc kubenswrapper[4665]: I1205 01:35:40.081913 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-dns-svc\") pod \"dnsmasq-dns-89c5cd4d5-7bn8l\" (UID: \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\") " pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" Dec 05 01:35:40 crc 
kubenswrapper[4665]: I1205 01:35:40.102911 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkfjq\" (UniqueName: \"kubernetes.io/projected/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-kube-api-access-dkfjq\") pod \"dnsmasq-dns-89c5cd4d5-7bn8l\" (UID: \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\") " pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" Dec 05 01:35:40 crc kubenswrapper[4665]: I1205 01:35:40.140734 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" Dec 05 01:35:40 crc kubenswrapper[4665]: I1205 01:35:40.694136 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-7bn8l"] Dec 05 01:35:40 crc kubenswrapper[4665]: I1205 01:35:40.904591 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="381ce009-2870-44a4-a156-e181fc862e7b" path="/var/lib/kubelet/pods/381ce009-2870-44a4-a156-e181fc862e7b/volumes" Dec 05 01:35:41 crc kubenswrapper[4665]: I1205 01:35:41.584048 4665 generic.go:334] "Generic (PLEG): container finished" podID="a14df5c5-95ad-4881-ab5a-d21c81eb6aad" containerID="5d6e3975e2811d6512f26236679f4aea38bd803abf815fff70d50e20470c18a3" exitCode=0 Dec 05 01:35:41 crc kubenswrapper[4665]: I1205 01:35:41.584151 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" event={"ID":"a14df5c5-95ad-4881-ab5a-d21c81eb6aad","Type":"ContainerDied","Data":"5d6e3975e2811d6512f26236679f4aea38bd803abf815fff70d50e20470c18a3"} Dec 05 01:35:41 crc kubenswrapper[4665]: I1205 01:35:41.584441 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" event={"ID":"a14df5c5-95ad-4881-ab5a-d21c81eb6aad","Type":"ContainerStarted","Data":"7e13d17c8b617ae6c0c9f68a12d1516a54ffd725178b9f5bbe7567b61a257459"} Dec 05 01:35:42 crc kubenswrapper[4665]: I1205 01:35:42.596196 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" event={"ID":"a14df5c5-95ad-4881-ab5a-d21c81eb6aad","Type":"ContainerStarted","Data":"b0503487f0a11712bcfc8edd04e9add1b1ccea571e40ee497cec9e17ccee4bb8"} Dec 05 01:35:42 crc kubenswrapper[4665]: I1205 01:35:42.597533 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" Dec 05 01:35:42 crc kubenswrapper[4665]: I1205 01:35:42.622179 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" podStartSLOduration=3.622161149 podStartE2EDuration="3.622161149s" podCreationTimestamp="2025-12-05 01:35:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:35:42.618671865 +0000 UTC m=+1517.958064164" watchObservedRunningTime="2025-12-05 01:35:42.622161149 +0000 UTC m=+1517.961553448" Dec 05 01:35:42 crc kubenswrapper[4665]: I1205 01:35:42.803031 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:35:42 crc kubenswrapper[4665]: I1205 01:35:42.819979 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:35:42 crc kubenswrapper[4665]: I1205 01:35:42.949189 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:35:42 crc kubenswrapper[4665]: I1205 01:35:42.949534 4665 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openstack/ceilometer-0" podUID="244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" containerName="ceilometer-central-agent" containerID="cri-o://9d6677faf49b5558c4aeab725be42fc8c2c759a9271e54a55084186b85f56a29" gracePeriod=30 Dec 05 01:35:42 crc kubenswrapper[4665]: I1205 01:35:42.949778 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" containerName="sg-core" containerID="cri-o://129f5c35436fcdebdb055432deaea24b24431ee557e54060bb9112aa6ea0fd5b" gracePeriod=30 Dec 05 01:35:42 crc kubenswrapper[4665]: I1205 01:35:42.949788 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" containerName="ceilometer-notification-agent" containerID="cri-o://561744087c5b5314ce1fdd1e87ce3734c66c6d69c2717214b152e9e333fb8cfa" gracePeriod=30 Dec 05 01:35:42 crc kubenswrapper[4665]: I1205 01:35:42.950014 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" containerName="proxy-httpd" containerID="cri-o://a8ff0a907843b2b3e07fe311e34bad5f58de2a4ca73292d6e0263db79af950d6" gracePeriod=30 Dec 05 01:35:43 crc kubenswrapper[4665]: I1205 01:35:43.050521 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 01:35:43 crc kubenswrapper[4665]: I1205 01:35:43.052867 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d4fabc12-4da5-427a-abfd-ba88f51e7a8c" containerName="nova-api-log" containerID="cri-o://239452dc173bfce15e6fe2916b7d9607504d625d7833c2a02709aaf2b8d9138d" gracePeriod=30 Dec 05 01:35:43 crc kubenswrapper[4665]: I1205 01:35:43.053343 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="d4fabc12-4da5-427a-abfd-ba88f51e7a8c" containerName="nova-api-api" containerID="cri-o://a2fb8f1e5e460aea986d8778a7bf595a82333086196e093d698832233ccd4399" gracePeriod=30 Dec 05 01:35:43 crc kubenswrapper[4665]: I1205 01:35:43.606269 4665 generic.go:334] "Generic (PLEG): container finished" podID="244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" containerID="a8ff0a907843b2b3e07fe311e34bad5f58de2a4ca73292d6e0263db79af950d6" exitCode=0 Dec 05 01:35:43 crc kubenswrapper[4665]: I1205 01:35:43.606314 4665 generic.go:334] "Generic (PLEG): container finished" podID="244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" containerID="129f5c35436fcdebdb055432deaea24b24431ee557e54060bb9112aa6ea0fd5b" exitCode=2 Dec 05 01:35:43 crc kubenswrapper[4665]: I1205 01:35:43.606323 4665 generic.go:334] "Generic (PLEG): container finished" podID="244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" containerID="9d6677faf49b5558c4aeab725be42fc8c2c759a9271e54a55084186b85f56a29" exitCode=0 Dec 05 01:35:43 crc kubenswrapper[4665]: I1205 01:35:43.606349 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d","Type":"ContainerDied","Data":"a8ff0a907843b2b3e07fe311e34bad5f58de2a4ca73292d6e0263db79af950d6"} Dec 05 01:35:43 crc kubenswrapper[4665]: I1205 01:35:43.606407 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d","Type":"ContainerDied","Data":"129f5c35436fcdebdb055432deaea24b24431ee557e54060bb9112aa6ea0fd5b"} Dec 05 01:35:43 crc kubenswrapper[4665]: I1205 01:35:43.606422 4665 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openstack/ceilometer-0" event={"ID":"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d","Type":"ContainerDied","Data":"9d6677faf49b5558c4aeab725be42fc8c2c759a9271e54a55084186b85f56a29"} Dec 05 01:35:43 crc kubenswrapper[4665]: I1205 01:35:43.608181 4665 generic.go:334] "Generic (PLEG): container finished" podID="d4fabc12-4da5-427a-abfd-ba88f51e7a8c" containerID="239452dc173bfce15e6fe2916b7d9607504d625d7833c2a02709aaf2b8d9138d" exitCode=143 Dec 05 01:35:43 crc kubenswrapper[4665]: I1205 01:35:43.608362 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d4fabc12-4da5-427a-abfd-ba88f51e7a8c","Type":"ContainerDied","Data":"239452dc173bfce15e6fe2916b7d9607504d625d7833c2a02709aaf2b8d9138d"} Dec 05 01:35:43 crc kubenswrapper[4665]: I1205 01:35:43.633999 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 05 01:35:43 crc kubenswrapper[4665]: I1205 01:35:43.842539 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-lljbq"] Dec 05 01:35:43 crc kubenswrapper[4665]: I1205 01:35:43.844433 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-lljbq" Dec 05 01:35:43 crc kubenswrapper[4665]: I1205 01:35:43.845863 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 05 01:35:43 crc kubenswrapper[4665]: I1205 01:35:43.857498 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-lljbq"] Dec 05 01:35:43 crc kubenswrapper[4665]: I1205 01:35:43.858411 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 05 01:35:43 crc kubenswrapper[4665]: I1205 01:35:43.972054 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/109c1163-805a-4399-b593-66f29ff2046c-config-data\") pod \"nova-cell1-cell-mapping-lljbq\" (UID: \"109c1163-805a-4399-b593-66f29ff2046c\") " pod="openstack/nova-cell1-cell-mapping-lljbq" Dec 05 01:35:43 crc kubenswrapper[4665]: I1205 01:35:43.972632 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6m8cd\" (UniqueName: \"kubernetes.io/projected/109c1163-805a-4399-b593-66f29ff2046c-kube-api-access-6m8cd\") pod \"nova-cell1-cell-mapping-lljbq\" (UID: \"109c1163-805a-4399-b593-66f29ff2046c\") " pod="openstack/nova-cell1-cell-mapping-lljbq" Dec 05 01:35:43 crc kubenswrapper[4665]: I1205 01:35:43.972692 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/109c1163-805a-4399-b593-66f29ff2046c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-lljbq\" (UID: \"109c1163-805a-4399-b593-66f29ff2046c\") " pod="openstack/nova-cell1-cell-mapping-lljbq" Dec 05 01:35:43 crc kubenswrapper[4665]: I1205 01:35:43.972821 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/109c1163-805a-4399-b593-66f29ff2046c-scripts\") pod \"nova-cell1-cell-mapping-lljbq\" (UID: \"109c1163-805a-4399-b593-66f29ff2046c\") " pod="openstack/nova-cell1-cell-mapping-lljbq" Dec 05 01:35:44 crc kubenswrapper[4665]: I1205 01:35:44.073975 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"scripts\" (UniqueName: \"kubernetes.io/secret/109c1163-805a-4399-b593-66f29ff2046c-scripts\") pod \"nova-cell1-cell-mapping-lljbq\" (UID: \"109c1163-805a-4399-b593-66f29ff2046c\") " pod="openstack/nova-cell1-cell-mapping-lljbq" Dec 05 01:35:44 crc kubenswrapper[4665]: I1205 01:35:44.074031 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/109c1163-805a-4399-b593-66f29ff2046c-config-data\") pod \"nova-cell1-cell-mapping-lljbq\" (UID: \"109c1163-805a-4399-b593-66f29ff2046c\") " pod="openstack/nova-cell1-cell-mapping-lljbq" Dec 05 01:35:44 crc kubenswrapper[4665]: I1205 01:35:44.074202 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6m8cd\" (UniqueName: \"kubernetes.io/projected/109c1163-805a-4399-b593-66f29ff2046c-kube-api-access-6m8cd\") pod \"nova-cell1-cell-mapping-lljbq\" (UID: \"109c1163-805a-4399-b593-66f29ff2046c\") " pod="openstack/nova-cell1-cell-mapping-lljbq" Dec 05 01:35:44 crc kubenswrapper[4665]: I1205 01:35:44.074222 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/109c1163-805a-4399-b593-66f29ff2046c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-lljbq\" (UID: \"109c1163-805a-4399-b593-66f29ff2046c\") " pod="openstack/nova-cell1-cell-mapping-lljbq" Dec 05 01:35:44 crc kubenswrapper[4665]: I1205 01:35:44.079856 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/109c1163-805a-4399-b593-66f29ff2046c-scripts\") pod \"nova-cell1-cell-mapping-lljbq\" (UID: \"109c1163-805a-4399-b593-66f29ff2046c\") " pod="openstack/nova-cell1-cell-mapping-lljbq" Dec 05 01:35:44 crc kubenswrapper[4665]: I1205 01:35:44.080045 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/109c1163-805a-4399-b593-66f29ff2046c-config-data\") pod \"nova-cell1-cell-mapping-lljbq\" (UID: \"109c1163-805a-4399-b593-66f29ff2046c\") " pod="openstack/nova-cell1-cell-mapping-lljbq" Dec 05 01:35:44 crc kubenswrapper[4665]: I1205 01:35:44.080823 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/109c1163-805a-4399-b593-66f29ff2046c-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-lljbq\" (UID: \"109c1163-805a-4399-b593-66f29ff2046c\") " pod="openstack/nova-cell1-cell-mapping-lljbq" Dec 05 01:35:44 crc kubenswrapper[4665]: I1205 01:35:44.101099 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6m8cd\" (UniqueName: \"kubernetes.io/projected/109c1163-805a-4399-b593-66f29ff2046c-kube-api-access-6m8cd\") pod \"nova-cell1-cell-mapping-lljbq\" (UID: \"109c1163-805a-4399-b593-66f29ff2046c\") " pod="openstack/nova-cell1-cell-mapping-lljbq" Dec 05 01:35:44 crc kubenswrapper[4665]: I1205 01:35:44.163933 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-lljbq" Dec 05 01:35:44 crc kubenswrapper[4665]: I1205 01:35:44.787528 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-lljbq"] Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.251729 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.429856 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-run-httpd\") pod \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.430086 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lbwzf\" (UniqueName: \"kubernetes.io/projected/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-kube-api-access-lbwzf\") pod \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.430120 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-ceilometer-tls-certs\") pod \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.430173 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-log-httpd\") pod \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.430203 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-sg-core-conf-yaml\") pod \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.430263 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-config-data\") pod \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.430305 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" (UID: "244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.430363 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-scripts\") pod \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.430390 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-combined-ca-bundle\") pod \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\" (UID: \"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d\") " Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.430597 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" (UID: "244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.430716 4665 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.430734 4665 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.439827 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-scripts" (OuterVolumeSpecName: "scripts") pod "244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" (UID: "244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.447495 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-kube-api-access-lbwzf" (OuterVolumeSpecName: "kube-api-access-lbwzf") pod "244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" (UID: "244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d"). InnerVolumeSpecName "kube-api-access-lbwzf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.491488 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" (UID: "244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.534651 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lbwzf\" (UniqueName: \"kubernetes.io/projected/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-kube-api-access-lbwzf\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.534675 4665 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.534686 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.546372 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" (UID: "244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.574168 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" (UID: "244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.577913 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-config-data" (OuterVolumeSpecName: "config-data") pod "244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" (UID: "244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.631145 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-lljbq" event={"ID":"109c1163-805a-4399-b593-66f29ff2046c","Type":"ContainerStarted","Data":"7f3850628abfa8cfcd2eb864137177dc10758be1f6ce19b53bbc65e712ca5ef5"} Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.631183 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-lljbq" event={"ID":"109c1163-805a-4399-b593-66f29ff2046c","Type":"ContainerStarted","Data":"230a8821c7fb5ae543e868598194c73fa60cd57231b513232546d0d9f47bb5a3"} Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.635871 4665 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.635892 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.635901 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.636719 4665 generic.go:334] "Generic (PLEG): container finished" podID="244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" containerID="561744087c5b5314ce1fdd1e87ce3734c66c6d69c2717214b152e9e333fb8cfa" exitCode=0 Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.636751 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d","Type":"ContainerDied","Data":"561744087c5b5314ce1fdd1e87ce3734c66c6d69c2717214b152e9e333fb8cfa"} Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.636772 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d","Type":"ContainerDied","Data":"a03b13cc8dd66ad952717657320087391c135257eb007f3d8dfbe6bdae651044"} Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.636789 4665 scope.go:117] "RemoveContainer" containerID="a8ff0a907843b2b3e07fe311e34bad5f58de2a4ca73292d6e0263db79af950d6" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.636829 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.651717 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-lljbq" podStartSLOduration=2.6516982799999997 podStartE2EDuration="2.65169828s" podCreationTimestamp="2025-12-05 01:35:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:35:45.650779297 +0000 UTC m=+1520.990171606" watchObservedRunningTime="2025-12-05 01:35:45.65169828 +0000 UTC m=+1520.991090579" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.656342 4665 scope.go:117] "RemoveContainer" containerID="129f5c35436fcdebdb055432deaea24b24431ee557e54060bb9112aa6ea0fd5b" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.683345 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.690863 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.709940 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:35:45 crc kubenswrapper[4665]: E1205 01:35:45.710703 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" containerName="ceilometer-notification-agent" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.710724 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" containerName="ceilometer-notification-agent" Dec 05 01:35:45 crc kubenswrapper[4665]: E1205 01:35:45.710749 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" containerName="ceilometer-central-agent" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.710756 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" containerName="ceilometer-central-agent" Dec 05 01:35:45 crc kubenswrapper[4665]: E1205 01:35:45.710769 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" containerName="proxy-httpd" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.710775 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" containerName="proxy-httpd" Dec 05 01:35:45 crc kubenswrapper[4665]: E1205 01:35:45.710789 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" containerName="sg-core" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.710795 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" containerName="sg-core" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.710973 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" containerName="sg-core" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.710990 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" containerName="ceilometer-central-agent" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.711004 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" containerName="proxy-httpd" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.711020 4665 
memory_manager.go:354] "RemoveStaleState removing state" podUID="244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" containerName="ceilometer-notification-agent" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.717190 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.721158 4665 scope.go:117] "RemoveContainer" containerID="561744087c5b5314ce1fdd1e87ce3734c66c6d69c2717214b152e9e333fb8cfa" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.722279 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.723055 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.723204 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.735329 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.736634 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4996c4c-1b2a-4bd2-b865-9fa197505120-log-httpd\") pod \"ceilometer-0\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") " pod="openstack/ceilometer-0" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.736719 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-config-data\") pod \"ceilometer-0\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") " pod="openstack/ceilometer-0" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.736740 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-scripts\") pod \"ceilometer-0\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") " pod="openstack/ceilometer-0" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.736764 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") " pod="openstack/ceilometer-0" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.736784 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") " pod="openstack/ceilometer-0" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.736814 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-psc55\" (UniqueName: \"kubernetes.io/projected/b4996c4c-1b2a-4bd2-b865-9fa197505120-kube-api-access-psc55\") pod \"ceilometer-0\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") " pod="openstack/ceilometer-0" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.736857 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") " pod="openstack/ceilometer-0" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.736885 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4996c4c-1b2a-4bd2-b865-9fa197505120-run-httpd\") pod \"ceilometer-0\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") " pod="openstack/ceilometer-0" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.795440 4665 scope.go:117] "RemoveContainer" containerID="9d6677faf49b5558c4aeab725be42fc8c2c759a9271e54a55084186b85f56a29" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.837447 4665 scope.go:117] "RemoveContainer" containerID="a8ff0a907843b2b3e07fe311e34bad5f58de2a4ca73292d6e0263db79af950d6" Dec 05 01:35:45 crc kubenswrapper[4665]: E1205 01:35:45.837808 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8ff0a907843b2b3e07fe311e34bad5f58de2a4ca73292d6e0263db79af950d6\": container with ID starting with a8ff0a907843b2b3e07fe311e34bad5f58de2a4ca73292d6e0263db79af950d6 not found: ID does not exist" containerID="a8ff0a907843b2b3e07fe311e34bad5f58de2a4ca73292d6e0263db79af950d6" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.837877 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8ff0a907843b2b3e07fe311e34bad5f58de2a4ca73292d6e0263db79af950d6"} err="failed to get container status \"a8ff0a907843b2b3e07fe311e34bad5f58de2a4ca73292d6e0263db79af950d6\": rpc error: code = NotFound desc = could not find container \"a8ff0a907843b2b3e07fe311e34bad5f58de2a4ca73292d6e0263db79af950d6\": container with ID starting with a8ff0a907843b2b3e07fe311e34bad5f58de2a4ca73292d6e0263db79af950d6 not found: ID does not exist" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.837907 4665 scope.go:117] "RemoveContainer" containerID="129f5c35436fcdebdb055432deaea24b24431ee557e54060bb9112aa6ea0fd5b" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.838861 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-config-data\") pod \"ceilometer-0\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") " pod="openstack/ceilometer-0" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.838910 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-scripts\") pod \"ceilometer-0\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") " pod="openstack/ceilometer-0" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.838952 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") " pod="openstack/ceilometer-0" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.838984 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: 
\"b4996c4c-1b2a-4bd2-b865-9fa197505120\") " pod="openstack/ceilometer-0" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.839032 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-psc55\" (UniqueName: \"kubernetes.io/projected/b4996c4c-1b2a-4bd2-b865-9fa197505120-kube-api-access-psc55\") pod \"ceilometer-0\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") " pod="openstack/ceilometer-0" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.839155 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") " pod="openstack/ceilometer-0" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.839248 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4996c4c-1b2a-4bd2-b865-9fa197505120-run-httpd\") pod \"ceilometer-0\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") " pod="openstack/ceilometer-0" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.839355 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4996c4c-1b2a-4bd2-b865-9fa197505120-log-httpd\") pod \"ceilometer-0\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") " pod="openstack/ceilometer-0" Dec 05 01:35:45 crc kubenswrapper[4665]: E1205 01:35:45.839637 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"129f5c35436fcdebdb055432deaea24b24431ee557e54060bb9112aa6ea0fd5b\": container with ID starting with 129f5c35436fcdebdb055432deaea24b24431ee557e54060bb9112aa6ea0fd5b not found: ID does not exist" containerID="129f5c35436fcdebdb055432deaea24b24431ee557e54060bb9112aa6ea0fd5b" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.839679 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"129f5c35436fcdebdb055432deaea24b24431ee557e54060bb9112aa6ea0fd5b"} err="failed to get container status \"129f5c35436fcdebdb055432deaea24b24431ee557e54060bb9112aa6ea0fd5b\": rpc error: code = NotFound desc = could not find container \"129f5c35436fcdebdb055432deaea24b24431ee557e54060bb9112aa6ea0fd5b\": container with ID starting with 129f5c35436fcdebdb055432deaea24b24431ee557e54060bb9112aa6ea0fd5b not found: ID does not exist" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.839709 4665 scope.go:117] "RemoveContainer" containerID="561744087c5b5314ce1fdd1e87ce3734c66c6d69c2717214b152e9e333fb8cfa" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.840128 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4996c4c-1b2a-4bd2-b865-9fa197505120-log-httpd\") pod \"ceilometer-0\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") " pod="openstack/ceilometer-0" Dec 05 01:35:45 crc kubenswrapper[4665]: E1205 01:35:45.840801 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"561744087c5b5314ce1fdd1e87ce3734c66c6d69c2717214b152e9e333fb8cfa\": container with ID starting with 561744087c5b5314ce1fdd1e87ce3734c66c6d69c2717214b152e9e333fb8cfa not found: ID does not exist" containerID="561744087c5b5314ce1fdd1e87ce3734c66c6d69c2717214b152e9e333fb8cfa" Dec 05 
01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.840853 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"561744087c5b5314ce1fdd1e87ce3734c66c6d69c2717214b152e9e333fb8cfa"} err="failed to get container status \"561744087c5b5314ce1fdd1e87ce3734c66c6d69c2717214b152e9e333fb8cfa\": rpc error: code = NotFound desc = could not find container \"561744087c5b5314ce1fdd1e87ce3734c66c6d69c2717214b152e9e333fb8cfa\": container with ID starting with 561744087c5b5314ce1fdd1e87ce3734c66c6d69c2717214b152e9e333fb8cfa not found: ID does not exist" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.840881 4665 scope.go:117] "RemoveContainer" containerID="9d6677faf49b5558c4aeab725be42fc8c2c759a9271e54a55084186b85f56a29" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.841042 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4996c4c-1b2a-4bd2-b865-9fa197505120-run-httpd\") pod \"ceilometer-0\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") " pod="openstack/ceilometer-0" Dec 05 01:35:45 crc kubenswrapper[4665]: E1205 01:35:45.841140 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9d6677faf49b5558c4aeab725be42fc8c2c759a9271e54a55084186b85f56a29\": container with ID starting with 9d6677faf49b5558c4aeab725be42fc8c2c759a9271e54a55084186b85f56a29 not found: ID does not exist" containerID="9d6677faf49b5558c4aeab725be42fc8c2c759a9271e54a55084186b85f56a29" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.841163 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9d6677faf49b5558c4aeab725be42fc8c2c759a9271e54a55084186b85f56a29"} err="failed to get container status \"9d6677faf49b5558c4aeab725be42fc8c2c759a9271e54a55084186b85f56a29\": rpc error: code = NotFound desc = could not find container \"9d6677faf49b5558c4aeab725be42fc8c2c759a9271e54a55084186b85f56a29\": container with ID starting with 9d6677faf49b5558c4aeab725be42fc8c2c759a9271e54a55084186b85f56a29 not found: ID does not exist" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.842988 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-scripts\") pod \"ceilometer-0\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") " pod="openstack/ceilometer-0" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.844816 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") " pod="openstack/ceilometer-0" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.861555 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") " pod="openstack/ceilometer-0" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.867029 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-config-data\") pod \"ceilometer-0\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") " pod="openstack/ceilometer-0" Dec 05 
01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.867873 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") " pod="openstack/ceilometer-0" Dec 05 01:35:45 crc kubenswrapper[4665]: I1205 01:35:45.878960 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-psc55\" (UniqueName: \"kubernetes.io/projected/b4996c4c-1b2a-4bd2-b865-9fa197505120-kube-api-access-psc55\") pod \"ceilometer-0\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") " pod="openstack/ceilometer-0" Dec 05 01:35:46 crc kubenswrapper[4665]: I1205 01:35:46.092204 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 01:35:46 crc kubenswrapper[4665]: I1205 01:35:46.649604 4665 generic.go:334] "Generic (PLEG): container finished" podID="d4fabc12-4da5-427a-abfd-ba88f51e7a8c" containerID="a2fb8f1e5e460aea986d8778a7bf595a82333086196e093d698832233ccd4399" exitCode=0 Dec 05 01:35:46 crc kubenswrapper[4665]: I1205 01:35:46.649919 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d4fabc12-4da5-427a-abfd-ba88f51e7a8c","Type":"ContainerDied","Data":"a2fb8f1e5e460aea986d8778a7bf595a82333086196e093d698832233ccd4399"} Dec 05 01:35:46 crc kubenswrapper[4665]: I1205 01:35:46.683482 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:35:46 crc kubenswrapper[4665]: I1205 01:35:46.772619 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 01:35:46 crc kubenswrapper[4665]: I1205 01:35:46.866205 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4fabc12-4da5-427a-abfd-ba88f51e7a8c-config-data\") pod \"d4fabc12-4da5-427a-abfd-ba88f51e7a8c\" (UID: \"d4fabc12-4da5-427a-abfd-ba88f51e7a8c\") " Dec 05 01:35:46 crc kubenswrapper[4665]: I1205 01:35:46.866539 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7gkw2\" (UniqueName: \"kubernetes.io/projected/d4fabc12-4da5-427a-abfd-ba88f51e7a8c-kube-api-access-7gkw2\") pod \"d4fabc12-4da5-427a-abfd-ba88f51e7a8c\" (UID: \"d4fabc12-4da5-427a-abfd-ba88f51e7a8c\") " Dec 05 01:35:46 crc kubenswrapper[4665]: I1205 01:35:46.866635 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4fabc12-4da5-427a-abfd-ba88f51e7a8c-combined-ca-bundle\") pod \"d4fabc12-4da5-427a-abfd-ba88f51e7a8c\" (UID: \"d4fabc12-4da5-427a-abfd-ba88f51e7a8c\") " Dec 05 01:35:46 crc kubenswrapper[4665]: I1205 01:35:46.866664 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4fabc12-4da5-427a-abfd-ba88f51e7a8c-logs\") pod \"d4fabc12-4da5-427a-abfd-ba88f51e7a8c\" (UID: \"d4fabc12-4da5-427a-abfd-ba88f51e7a8c\") " Dec 05 01:35:46 crc kubenswrapper[4665]: I1205 01:35:46.867782 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4fabc12-4da5-427a-abfd-ba88f51e7a8c-logs" (OuterVolumeSpecName: "logs") pod "d4fabc12-4da5-427a-abfd-ba88f51e7a8c" (UID: "d4fabc12-4da5-427a-abfd-ba88f51e7a8c"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:35:46 crc kubenswrapper[4665]: I1205 01:35:46.878236 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4fabc12-4da5-427a-abfd-ba88f51e7a8c-kube-api-access-7gkw2" (OuterVolumeSpecName: "kube-api-access-7gkw2") pod "d4fabc12-4da5-427a-abfd-ba88f51e7a8c" (UID: "d4fabc12-4da5-427a-abfd-ba88f51e7a8c"). InnerVolumeSpecName "kube-api-access-7gkw2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:35:46 crc kubenswrapper[4665]: I1205 01:35:46.926348 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4fabc12-4da5-427a-abfd-ba88f51e7a8c-config-data" (OuterVolumeSpecName: "config-data") pod "d4fabc12-4da5-427a-abfd-ba88f51e7a8c" (UID: "d4fabc12-4da5-427a-abfd-ba88f51e7a8c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:46 crc kubenswrapper[4665]: I1205 01:35:46.926739 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d" path="/var/lib/kubelet/pods/244e0d08-29c3-4fdb-b29a-d1e5cdb11f1d/volumes" Dec 05 01:35:46 crc kubenswrapper[4665]: I1205 01:35:46.942146 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d4fabc12-4da5-427a-abfd-ba88f51e7a8c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "d4fabc12-4da5-427a-abfd-ba88f51e7a8c" (UID: "d4fabc12-4da5-427a-abfd-ba88f51e7a8c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:46 crc kubenswrapper[4665]: I1205 01:35:46.969937 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/d4fabc12-4da5-427a-abfd-ba88f51e7a8c-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:46 crc kubenswrapper[4665]: I1205 01:35:46.969960 4665 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/d4fabc12-4da5-427a-abfd-ba88f51e7a8c-logs\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:46 crc kubenswrapper[4665]: I1205 01:35:46.969971 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/d4fabc12-4da5-427a-abfd-ba88f51e7a8c-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:46 crc kubenswrapper[4665]: I1205 01:35:46.969980 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7gkw2\" (UniqueName: \"kubernetes.io/projected/d4fabc12-4da5-427a-abfd-ba88f51e7a8c-kube-api-access-7gkw2\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.106023 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.665876 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"d4fabc12-4da5-427a-abfd-ba88f51e7a8c","Type":"ContainerDied","Data":"cec56c5d4292676dfd683c205fbdd1ffa17b330cf99999c20b06ac2f4cd7ff4f"} Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.666171 4665 scope.go:117] "RemoveContainer" containerID="a2fb8f1e5e460aea986d8778a7bf595a82333086196e093d698832233ccd4399" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.666314 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.671101 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4996c4c-1b2a-4bd2-b865-9fa197505120","Type":"ContainerStarted","Data":"2afadf7b77a8d440d719bf7c743e8c5fbd59076ba1ed071234daca7c4f4c02b0"} Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.671159 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4996c4c-1b2a-4bd2-b865-9fa197505120","Type":"ContainerStarted","Data":"fd63a1fa6ad29e7d5fe2006d97e7be281d71395b651bf1342c100095543ddef5"} Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.700165 4665 scope.go:117] "RemoveContainer" containerID="239452dc173bfce15e6fe2916b7d9607504d625d7833c2a02709aaf2b8d9138d" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.700169 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.710074 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.757122 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 01:35:47 crc kubenswrapper[4665]: E1205 01:35:47.757522 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4fabc12-4da5-427a-abfd-ba88f51e7a8c" containerName="nova-api-api" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.757538 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4fabc12-4da5-427a-abfd-ba88f51e7a8c" containerName="nova-api-api" Dec 05 01:35:47 crc kubenswrapper[4665]: E1205 01:35:47.757557 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4fabc12-4da5-427a-abfd-ba88f51e7a8c" containerName="nova-api-log" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.757564 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4fabc12-4da5-427a-abfd-ba88f51e7a8c" containerName="nova-api-log" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.757776 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4fabc12-4da5-427a-abfd-ba88f51e7a8c" containerName="nova-api-log" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.757804 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4fabc12-4da5-427a-abfd-ba88f51e7a8c" containerName="nova-api-api" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.774846 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.782693 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.782857 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.783031 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.785011 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-public-tls-certs\") pod \"nova-api-0\" (UID: \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\") " pod="openstack/nova-api-0" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.785070 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-config-data\") pod \"nova-api-0\" (UID: \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\") " pod="openstack/nova-api-0" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.785086 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-logs\") pod \"nova-api-0\" (UID: \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\") " pod="openstack/nova-api-0" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.785104 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\") " pod="openstack/nova-api-0" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.785126 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\") " pod="openstack/nova-api-0" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.785165 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jd68s\" (UniqueName: \"kubernetes.io/projected/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-kube-api-access-jd68s\") pod \"nova-api-0\" (UID: \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\") " pod="openstack/nova-api-0" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.804197 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.887095 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-public-tls-certs\") pod \"nova-api-0\" (UID: \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\") " pod="openstack/nova-api-0" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.887191 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-config-data\") pod \"nova-api-0\" (UID: 
\"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\") " pod="openstack/nova-api-0" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.887213 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-logs\") pod \"nova-api-0\" (UID: \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\") " pod="openstack/nova-api-0" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.887233 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\") " pod="openstack/nova-api-0" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.887257 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\") " pod="openstack/nova-api-0" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.887316 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jd68s\" (UniqueName: \"kubernetes.io/projected/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-kube-api-access-jd68s\") pod \"nova-api-0\" (UID: \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\") " pod="openstack/nova-api-0" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.888961 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-logs\") pod \"nova-api-0\" (UID: \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\") " pod="openstack/nova-api-0" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.897992 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\") " pod="openstack/nova-api-0" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.902951 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-public-tls-certs\") pod \"nova-api-0\" (UID: \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\") " pod="openstack/nova-api-0" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.918704 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\") " pod="openstack/nova-api-0" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.920988 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jd68s\" (UniqueName: \"kubernetes.io/projected/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-kube-api-access-jd68s\") pod \"nova-api-0\" (UID: \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\") " pod="openstack/nova-api-0" Dec 05 01:35:47 crc kubenswrapper[4665]: I1205 01:35:47.925919 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-config-data\") pod \"nova-api-0\" (UID: \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\") " pod="openstack/nova-api-0" 
Dec 05 01:35:48 crc kubenswrapper[4665]: I1205 01:35:48.122459 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 01:35:48 crc kubenswrapper[4665]: I1205 01:35:48.649501 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 01:35:48 crc kubenswrapper[4665]: W1205 01:35:48.658614 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5e3bcf3c_5d09_4120_88dc_5a5eafebb5f5.slice/crio-41c117dc209561bc342fc57782155bcf2f01174a7892975924841dabe4758f97 WatchSource:0}: Error finding container 41c117dc209561bc342fc57782155bcf2f01174a7892975924841dabe4758f97: Status 404 returned error can't find the container with id 41c117dc209561bc342fc57782155bcf2f01174a7892975924841dabe4758f97 Dec 05 01:35:48 crc kubenswrapper[4665]: I1205 01:35:48.684179 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4996c4c-1b2a-4bd2-b865-9fa197505120","Type":"ContainerStarted","Data":"ddafcdff0338dbf1ab325b923766f30c929e5ba1e8230d7aa9a0bdbb37148dff"} Dec 05 01:35:48 crc kubenswrapper[4665]: I1205 01:35:48.685616 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5","Type":"ContainerStarted","Data":"41c117dc209561bc342fc57782155bcf2f01174a7892975924841dabe4758f97"} Dec 05 01:35:48 crc kubenswrapper[4665]: I1205 01:35:48.946088 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4fabc12-4da5-427a-abfd-ba88f51e7a8c" path="/var/lib/kubelet/pods/d4fabc12-4da5-427a-abfd-ba88f51e7a8c/volumes" Dec 05 01:35:49 crc kubenswrapper[4665]: I1205 01:35:49.695318 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5","Type":"ContainerStarted","Data":"b64b7e94cf1f5819f5f0f218f4ca44e5936b31fb9c820e0596c9658dcae43a6d"} Dec 05 01:35:49 crc kubenswrapper[4665]: I1205 01:35:49.695806 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5","Type":"ContainerStarted","Data":"7f0236bd2db09337b7834647115aba3e11bbf61e8f8cd44d86e8d3d6549b1458"} Dec 05 01:35:49 crc kubenswrapper[4665]: I1205 01:35:49.698568 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4996c4c-1b2a-4bd2-b865-9fa197505120","Type":"ContainerStarted","Data":"21dd5d6d818fc5924cba5dfce43bced3241ed12f90e9af0757e23286e95af063"} Dec 05 01:35:49 crc kubenswrapper[4665]: I1205 01:35:49.722843 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.722823908 podStartE2EDuration="2.722823908s" podCreationTimestamp="2025-12-05 01:35:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:35:49.713866931 +0000 UTC m=+1525.053259230" watchObservedRunningTime="2025-12-05 01:35:49.722823908 +0000 UTC m=+1525.062216207" Dec 05 01:35:50 crc kubenswrapper[4665]: I1205 01:35:50.142095 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" Dec 05 01:35:50 crc kubenswrapper[4665]: I1205 01:35:50.230901 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-cqsxn"] Dec 05 01:35:50 crc kubenswrapper[4665]: I1205 01:35:50.231144 
4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" podUID="44e024d0-ebb1-4a1f-9761-f47b20539a2f" containerName="dnsmasq-dns" containerID="cri-o://8dddd25fa836af08b94cc444e895d5eeef63ef3292ffd922b070e72f2453a6e0" gracePeriod=10
Dec 05 01:35:50 crc kubenswrapper[4665]: I1205 01:35:50.709625 4665 generic.go:334] "Generic (PLEG): container finished" podID="44e024d0-ebb1-4a1f-9761-f47b20539a2f" containerID="8dddd25fa836af08b94cc444e895d5eeef63ef3292ffd922b070e72f2453a6e0" exitCode=0
Dec 05 01:35:50 crc kubenswrapper[4665]: I1205 01:35:50.710649 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" event={"ID":"44e024d0-ebb1-4a1f-9761-f47b20539a2f","Type":"ContainerDied","Data":"8dddd25fa836af08b94cc444e895d5eeef63ef3292ffd922b070e72f2453a6e0"}
Dec 05 01:35:50 crc kubenswrapper[4665]: I1205 01:35:50.808086 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-cqsxn"
Dec 05 01:35:50 crc kubenswrapper[4665]: I1205 01:35:50.963051 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-ovsdbserver-nb\") pod \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\" (UID: \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\") "
Dec 05 01:35:50 crc kubenswrapper[4665]: I1205 01:35:50.963151 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b9qzl\" (UniqueName: \"kubernetes.io/projected/44e024d0-ebb1-4a1f-9761-f47b20539a2f-kube-api-access-b9qzl\") pod \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\" (UID: \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\") "
Dec 05 01:35:50 crc kubenswrapper[4665]: I1205 01:35:50.963278 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-dns-svc\") pod \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\" (UID: \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\") "
Dec 05 01:35:50 crc kubenswrapper[4665]: I1205 01:35:50.963343 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-dns-swift-storage-0\") pod \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\" (UID: \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\") "
Dec 05 01:35:50 crc kubenswrapper[4665]: I1205 01:35:50.963407 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-ovsdbserver-sb\") pod \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\" (UID: \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\") "
Dec 05 01:35:50 crc kubenswrapper[4665]: I1205 01:35:50.963517 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-config\") pod \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\" (UID: \"44e024d0-ebb1-4a1f-9761-f47b20539a2f\") "
Dec 05 01:35:50 crc kubenswrapper[4665]: I1205 01:35:50.972876 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44e024d0-ebb1-4a1f-9761-f47b20539a2f-kube-api-access-b9qzl" (OuterVolumeSpecName: "kube-api-access-b9qzl") pod "44e024d0-ebb1-4a1f-9761-f47b20539a2f" (UID: "44e024d0-ebb1-4a1f-9761-f47b20539a2f"). InnerVolumeSpecName "kube-api-access-b9qzl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:35:51 crc kubenswrapper[4665]: I1205 01:35:51.016210 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "44e024d0-ebb1-4a1f-9761-f47b20539a2f" (UID: "44e024d0-ebb1-4a1f-9761-f47b20539a2f"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:35:51 crc kubenswrapper[4665]: I1205 01:35:51.027823 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-config" (OuterVolumeSpecName: "config") pod "44e024d0-ebb1-4a1f-9761-f47b20539a2f" (UID: "44e024d0-ebb1-4a1f-9761-f47b20539a2f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:35:51 crc kubenswrapper[4665]: I1205 01:35:51.032560 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "44e024d0-ebb1-4a1f-9761-f47b20539a2f" (UID: "44e024d0-ebb1-4a1f-9761-f47b20539a2f"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:35:51 crc kubenswrapper[4665]: I1205 01:35:51.050762 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "44e024d0-ebb1-4a1f-9761-f47b20539a2f" (UID: "44e024d0-ebb1-4a1f-9761-f47b20539a2f"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:35:51 crc kubenswrapper[4665]: I1205 01:35:51.061826 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "44e024d0-ebb1-4a1f-9761-f47b20539a2f" (UID: "44e024d0-ebb1-4a1f-9761-f47b20539a2f"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 01:35:51 crc kubenswrapper[4665]: I1205 01:35:51.066942 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-config\") on node \"crc\" DevicePath \"\""
Dec 05 01:35:51 crc kubenswrapper[4665]: I1205 01:35:51.066968 4665 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 05 01:35:51 crc kubenswrapper[4665]: I1205 01:35:51.066979 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b9qzl\" (UniqueName: \"kubernetes.io/projected/44e024d0-ebb1-4a1f-9761-f47b20539a2f-kube-api-access-b9qzl\") on node \"crc\" DevicePath \"\""
Dec 05 01:35:51 crc kubenswrapper[4665]: I1205 01:35:51.066988 4665 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 01:35:51 crc kubenswrapper[4665]: I1205 01:35:51.066996 4665 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 05 01:35:51 crc kubenswrapper[4665]: I1205 01:35:51.067007 4665 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/44e024d0-ebb1-4a1f-9761-f47b20539a2f-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 05 01:35:51 crc kubenswrapper[4665]: I1205 01:35:51.723252 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4996c4c-1b2a-4bd2-b865-9fa197505120","Type":"ContainerStarted","Data":"490d798af8e52b75af2865a9ad2d7ff47a01fba415d5ea02f08b2788c0043ad0"}
Dec 05 01:35:51 crc kubenswrapper[4665]: I1205 01:35:51.723841 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b4996c4c-1b2a-4bd2-b865-9fa197505120" containerName="ceilometer-central-agent" containerID="cri-o://2afadf7b77a8d440d719bf7c743e8c5fbd59076ba1ed071234daca7c4f4c02b0" gracePeriod=30
Dec 05 01:35:51 crc kubenswrapper[4665]: I1205 01:35:51.724099 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 05 01:35:51 crc kubenswrapper[4665]: I1205 01:35:51.726518 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b4996c4c-1b2a-4bd2-b865-9fa197505120" containerName="proxy-httpd" containerID="cri-o://490d798af8e52b75af2865a9ad2d7ff47a01fba415d5ea02f08b2788c0043ad0" gracePeriod=30
Dec 05 01:35:51 crc kubenswrapper[4665]: I1205 01:35:51.726669 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b4996c4c-1b2a-4bd2-b865-9fa197505120" containerName="sg-core" containerID="cri-o://21dd5d6d818fc5924cba5dfce43bced3241ed12f90e9af0757e23286e95af063" gracePeriod=30
Dec 05 01:35:51 crc kubenswrapper[4665]: I1205 01:35:51.726844 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b4996c4c-1b2a-4bd2-b865-9fa197505120" containerName="ceilometer-notification-agent" containerID="cri-o://ddafcdff0338dbf1ab325b923766f30c929e5ba1e8230d7aa9a0bdbb37148dff" gracePeriod=30
Dec 05 01:35:51 crc kubenswrapper[4665]: I1205 01:35:51.735806 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-757b4f8459-cqsxn" event={"ID":"44e024d0-ebb1-4a1f-9761-f47b20539a2f","Type":"ContainerDied","Data":"74f34453cf2139c149b6eabbd3e7ba5a04cc2735860354bf639df72f3a0dc59c"}
Dec 05 01:35:51 crc kubenswrapper[4665]: I1205 01:35:51.735856 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-757b4f8459-cqsxn"
Dec 05 01:35:51 crc kubenswrapper[4665]: I1205 01:35:51.735871 4665 scope.go:117] "RemoveContainer" containerID="8dddd25fa836af08b94cc444e895d5eeef63ef3292ffd922b070e72f2453a6e0"
Dec 05 01:35:51 crc kubenswrapper[4665]: I1205 01:35:51.765811 4665 scope.go:117] "RemoveContainer" containerID="eb76217eba3af3eab6b9954e3eb543e7b714d696f725f649d14a677f402c2822"
Dec 05 01:35:51 crc kubenswrapper[4665]: I1205 01:35:51.779004 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.087141199 podStartE2EDuration="6.778987541s" podCreationTimestamp="2025-12-05 01:35:45 +0000 UTC" firstStartedPulling="2025-12-05 01:35:46.716862439 +0000 UTC m=+1522.056254738" lastFinishedPulling="2025-12-05 01:35:50.408708781 +0000 UTC m=+1525.748101080" observedRunningTime="2025-12-05 01:35:51.750479831 +0000 UTC m=+1527.089872130" watchObservedRunningTime="2025-12-05 01:35:51.778987541 +0000 UTC m=+1527.118379830"
Dec 05 01:35:51 crc kubenswrapper[4665]: I1205 01:35:51.785933 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-cqsxn"]
Dec 05 01:35:51 crc kubenswrapper[4665]: I1205 01:35:51.801230 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-757b4f8459-cqsxn"]
Dec 05 01:35:52 crc kubenswrapper[4665]: I1205 01:35:52.748335 4665 generic.go:334] "Generic (PLEG): container finished" podID="b4996c4c-1b2a-4bd2-b865-9fa197505120" containerID="490d798af8e52b75af2865a9ad2d7ff47a01fba415d5ea02f08b2788c0043ad0" exitCode=0
Dec 05 01:35:52 crc kubenswrapper[4665]: I1205 01:35:52.748373 4665 generic.go:334] "Generic (PLEG): container finished" podID="b4996c4c-1b2a-4bd2-b865-9fa197505120" containerID="21dd5d6d818fc5924cba5dfce43bced3241ed12f90e9af0757e23286e95af063" exitCode=2
Dec 05 01:35:52 crc kubenswrapper[4665]: I1205 01:35:52.748426 4665 generic.go:334] "Generic (PLEG): container finished" podID="b4996c4c-1b2a-4bd2-b865-9fa197505120" containerID="ddafcdff0338dbf1ab325b923766f30c929e5ba1e8230d7aa9a0bdbb37148dff" exitCode=0
Dec 05 01:35:52 crc kubenswrapper[4665]: I1205 01:35:52.748418 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4996c4c-1b2a-4bd2-b865-9fa197505120","Type":"ContainerDied","Data":"490d798af8e52b75af2865a9ad2d7ff47a01fba415d5ea02f08b2788c0043ad0"}
Dec 05 01:35:52 crc kubenswrapper[4665]: I1205 01:35:52.748483 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4996c4c-1b2a-4bd2-b865-9fa197505120","Type":"ContainerDied","Data":"21dd5d6d818fc5924cba5dfce43bced3241ed12f90e9af0757e23286e95af063"}
Dec 05 01:35:52 crc kubenswrapper[4665]: I1205 01:35:52.748501 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4996c4c-1b2a-4bd2-b865-9fa197505120","Type":"ContainerDied","Data":"ddafcdff0338dbf1ab325b923766f30c929e5ba1e8230d7aa9a0bdbb37148dff"}
Dec 05 01:35:52 crc kubenswrapper[4665]: I1205 01:35:52.750203 4665 generic.go:334] "Generic (PLEG): container finished" podID="109c1163-805a-4399-b593-66f29ff2046c" containerID="7f3850628abfa8cfcd2eb864137177dc10758be1f6ce19b53bbc65e712ca5ef5" exitCode=0
Dec 05 01:35:52 crc kubenswrapper[4665]: I1205 01:35:52.750272 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-lljbq" event={"ID":"109c1163-805a-4399-b593-66f29ff2046c","Type":"ContainerDied","Data":"7f3850628abfa8cfcd2eb864137177dc10758be1f6ce19b53bbc65e712ca5ef5"}
Dec 05 01:35:52 crc kubenswrapper[4665]: I1205 01:35:52.904653 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44e024d0-ebb1-4a1f-9761-f47b20539a2f" path="/var/lib/kubelet/pods/44e024d0-ebb1-4a1f-9761-f47b20539a2f/volumes"
Dec 05 01:35:54 crc kubenswrapper[4665]: I1205 01:35:54.148391 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-lljbq"
Dec 05 01:35:54 crc kubenswrapper[4665]: I1205 01:35:54.230924 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6m8cd\" (UniqueName: \"kubernetes.io/projected/109c1163-805a-4399-b593-66f29ff2046c-kube-api-access-6m8cd\") pod \"109c1163-805a-4399-b593-66f29ff2046c\" (UID: \"109c1163-805a-4399-b593-66f29ff2046c\") "
Dec 05 01:35:54 crc kubenswrapper[4665]: I1205 01:35:54.231002 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/109c1163-805a-4399-b593-66f29ff2046c-combined-ca-bundle\") pod \"109c1163-805a-4399-b593-66f29ff2046c\" (UID: \"109c1163-805a-4399-b593-66f29ff2046c\") "
Dec 05 01:35:54 crc kubenswrapper[4665]: I1205 01:35:54.231043 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/109c1163-805a-4399-b593-66f29ff2046c-config-data\") pod \"109c1163-805a-4399-b593-66f29ff2046c\" (UID: \"109c1163-805a-4399-b593-66f29ff2046c\") "
Dec 05 01:35:54 crc kubenswrapper[4665]: I1205 01:35:54.231120 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/109c1163-805a-4399-b593-66f29ff2046c-scripts\") pod \"109c1163-805a-4399-b593-66f29ff2046c\" (UID: \"109c1163-805a-4399-b593-66f29ff2046c\") "
Dec 05 01:35:54 crc kubenswrapper[4665]: I1205 01:35:54.236408 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/109c1163-805a-4399-b593-66f29ff2046c-kube-api-access-6m8cd" (OuterVolumeSpecName: "kube-api-access-6m8cd") pod "109c1163-805a-4399-b593-66f29ff2046c" (UID: "109c1163-805a-4399-b593-66f29ff2046c"). InnerVolumeSpecName "kube-api-access-6m8cd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:35:54 crc kubenswrapper[4665]: I1205 01:35:54.238646 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/109c1163-805a-4399-b593-66f29ff2046c-scripts" (OuterVolumeSpecName: "scripts") pod "109c1163-805a-4399-b593-66f29ff2046c" (UID: "109c1163-805a-4399-b593-66f29ff2046c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:35:54 crc kubenswrapper[4665]: I1205 01:35:54.266451 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/109c1163-805a-4399-b593-66f29ff2046c-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "109c1163-805a-4399-b593-66f29ff2046c" (UID: "109c1163-805a-4399-b593-66f29ff2046c"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:35:54 crc kubenswrapper[4665]: I1205 01:35:54.266538 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/109c1163-805a-4399-b593-66f29ff2046c-config-data" (OuterVolumeSpecName: "config-data") pod "109c1163-805a-4399-b593-66f29ff2046c" (UID: "109c1163-805a-4399-b593-66f29ff2046c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:35:54 crc kubenswrapper[4665]: I1205 01:35:54.333519 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6m8cd\" (UniqueName: \"kubernetes.io/projected/109c1163-805a-4399-b593-66f29ff2046c-kube-api-access-6m8cd\") on node \"crc\" DevicePath \"\""
Dec 05 01:35:54 crc kubenswrapper[4665]: I1205 01:35:54.333549 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/109c1163-805a-4399-b593-66f29ff2046c-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 01:35:54 crc kubenswrapper[4665]: I1205 01:35:54.333559 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/109c1163-805a-4399-b593-66f29ff2046c-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 01:35:54 crc kubenswrapper[4665]: I1205 01:35:54.333567 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/109c1163-805a-4399-b593-66f29ff2046c-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 01:35:54 crc kubenswrapper[4665]: I1205 01:35:54.772223 4665 generic.go:334] "Generic (PLEG): container finished" podID="b4996c4c-1b2a-4bd2-b865-9fa197505120" containerID="2afadf7b77a8d440d719bf7c743e8c5fbd59076ba1ed071234daca7c4f4c02b0" exitCode=0
Dec 05 01:35:54 crc kubenswrapper[4665]: I1205 01:35:54.772314 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4996c4c-1b2a-4bd2-b865-9fa197505120","Type":"ContainerDied","Data":"2afadf7b77a8d440d719bf7c743e8c5fbd59076ba1ed071234daca7c4f4c02b0"}
Dec 05 01:35:54 crc kubenswrapper[4665]: I1205 01:35:54.774276 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-lljbq" event={"ID":"109c1163-805a-4399-b593-66f29ff2046c","Type":"ContainerDied","Data":"230a8821c7fb5ae543e868598194c73fa60cd57231b513232546d0d9f47bb5a3"}
Dec 05 01:35:54 crc kubenswrapper[4665]: I1205 01:35:54.774328 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="230a8821c7fb5ae543e868598194c73fa60cd57231b513232546d0d9f47bb5a3"
Dec 05 01:35:54 crc kubenswrapper[4665]: I1205 01:35:54.774500 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-cell-mapping-lljbq"
Dec 05 01:35:54 crc kubenswrapper[4665]: I1205 01:35:54.995921 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 05 01:35:54 crc kubenswrapper[4665]: I1205 01:35:54.996193 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5" containerName="nova-api-log" containerID="cri-o://7f0236bd2db09337b7834647115aba3e11bbf61e8f8cd44d86e8d3d6549b1458" gracePeriod=30
Dec 05 01:35:54 crc kubenswrapper[4665]: I1205 01:35:54.996718 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5" containerName="nova-api-api" containerID="cri-o://b64b7e94cf1f5819f5f0f218f4ca44e5936b31fb9c820e0596c9658dcae43a6d" gracePeriod=30
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.033516 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.033771 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="7bc8f290-c6fe-47d5-b94b-d547a2eeadc1" containerName="nova-scheduler-scheduler" containerID="cri-o://4dd73a6067b50ed9b2f4fb1c28ce6adedffdf8d9f663bd7a1bfa80ca1f9864f0" gracePeriod=30
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.079467 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.080583 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="7d2986ca-e8d1-4199-ac7b-3ea31432eb9e" containerName="nova-metadata-log" containerID="cri-o://0cd27e04f20931086f739ed9c91cba0917f0db4c4aea8c01005071e83405f977" gracePeriod=30
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.082146 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="7d2986ca-e8d1-4199-ac7b-3ea31432eb9e" containerName="nova-metadata-metadata" containerID="cri-o://d200baa5912d28a3b0b2bfbe61180d0fa92b71d068b63cd871bdb0547f64cb7d" gracePeriod=30
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.408677 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.564155 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-sg-core-conf-yaml\") pod \"b4996c4c-1b2a-4bd2-b865-9fa197505120\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") "
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.564225 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4996c4c-1b2a-4bd2-b865-9fa197505120-log-httpd\") pod \"b4996c4c-1b2a-4bd2-b865-9fa197505120\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") "
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.564406 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-scripts\") pod \"b4996c4c-1b2a-4bd2-b865-9fa197505120\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") "
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.564434 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4996c4c-1b2a-4bd2-b865-9fa197505120-run-httpd\") pod \"b4996c4c-1b2a-4bd2-b865-9fa197505120\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") "
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.564450 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-config-data\") pod \"b4996c4c-1b2a-4bd2-b865-9fa197505120\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") "
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.564466 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-psc55\" (UniqueName: \"kubernetes.io/projected/b4996c4c-1b2a-4bd2-b865-9fa197505120-kube-api-access-psc55\") pod \"b4996c4c-1b2a-4bd2-b865-9fa197505120\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") "
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.564519 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-ceilometer-tls-certs\") pod \"b4996c4c-1b2a-4bd2-b865-9fa197505120\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") "
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.564536 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-combined-ca-bundle\") pod \"b4996c4c-1b2a-4bd2-b865-9fa197505120\" (UID: \"b4996c4c-1b2a-4bd2-b865-9fa197505120\") "
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.566541 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4996c4c-1b2a-4bd2-b865-9fa197505120-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b4996c4c-1b2a-4bd2-b865-9fa197505120" (UID: "b4996c4c-1b2a-4bd2-b865-9fa197505120"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.566960 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b4996c4c-1b2a-4bd2-b865-9fa197505120-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b4996c4c-1b2a-4bd2-b865-9fa197505120" (UID: "b4996c4c-1b2a-4bd2-b865-9fa197505120"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.603475 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-scripts" (OuterVolumeSpecName: "scripts") pod "b4996c4c-1b2a-4bd2-b865-9fa197505120" (UID: "b4996c4c-1b2a-4bd2-b865-9fa197505120"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.604272 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b4996c4c-1b2a-4bd2-b865-9fa197505120-kube-api-access-psc55" (OuterVolumeSpecName: "kube-api-access-psc55") pod "b4996c4c-1b2a-4bd2-b865-9fa197505120" (UID: "b4996c4c-1b2a-4bd2-b865-9fa197505120"). InnerVolumeSpecName "kube-api-access-psc55". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.646980 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b4996c4c-1b2a-4bd2-b865-9fa197505120" (UID: "b4996c4c-1b2a-4bd2-b865-9fa197505120"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.672623 4665 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.672659 4665 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4996c4c-1b2a-4bd2-b865-9fa197505120-run-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.672673 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-psc55\" (UniqueName: \"kubernetes.io/projected/b4996c4c-1b2a-4bd2-b865-9fa197505120-kube-api-access-psc55\") on node \"crc\" DevicePath \"\""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.672684 4665 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.672692 4665 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b4996c4c-1b2a-4bd2-b865-9fa197505120-log-httpd\") on node \"crc\" DevicePath \"\""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.728472 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "b4996c4c-1b2a-4bd2-b865-9fa197505120" (UID: "b4996c4c-1b2a-4bd2-b865-9fa197505120"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.739436 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b4996c4c-1b2a-4bd2-b865-9fa197505120" (UID: "b4996c4c-1b2a-4bd2-b865-9fa197505120"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.765271 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.782210 4665 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.782238 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.799418 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b4996c4c-1b2a-4bd2-b865-9fa197505120","Type":"ContainerDied","Data":"fd63a1fa6ad29e7d5fe2006d97e7be281d71395b651bf1342c100095543ddef5"}
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.799532 4665 scope.go:117] "RemoveContainer" containerID="490d798af8e52b75af2865a9ad2d7ff47a01fba415d5ea02f08b2788c0043ad0"
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.799480 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.814265 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-config-data" (OuterVolumeSpecName: "config-data") pod "b4996c4c-1b2a-4bd2-b865-9fa197505120" (UID: "b4996c4c-1b2a-4bd2-b865-9fa197505120"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.819537 4665 generic.go:334] "Generic (PLEG): container finished" podID="7d2986ca-e8d1-4199-ac7b-3ea31432eb9e" containerID="0cd27e04f20931086f739ed9c91cba0917f0db4c4aea8c01005071e83405f977" exitCode=143
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.819612 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e","Type":"ContainerDied","Data":"0cd27e04f20931086f739ed9c91cba0917f0db4c4aea8c01005071e83405f977"}
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.822845 4665 generic.go:334] "Generic (PLEG): container finished" podID="5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5" containerID="b64b7e94cf1f5819f5f0f218f4ca44e5936b31fb9c820e0596c9658dcae43a6d" exitCode=0
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.822871 4665 generic.go:334] "Generic (PLEG): container finished" podID="5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5" containerID="7f0236bd2db09337b7834647115aba3e11bbf61e8f8cd44d86e8d3d6549b1458" exitCode=143
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.822888 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5","Type":"ContainerDied","Data":"b64b7e94cf1f5819f5f0f218f4ca44e5936b31fb9c820e0596c9658dcae43a6d"}
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.822909 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5","Type":"ContainerDied","Data":"7f0236bd2db09337b7834647115aba3e11bbf61e8f8cd44d86e8d3d6549b1458"}
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.822918 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5","Type":"ContainerDied","Data":"41c117dc209561bc342fc57782155bcf2f01174a7892975924841dabe4758f97"}
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.823068 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.843990 4665 scope.go:117] "RemoveContainer" containerID="21dd5d6d818fc5924cba5dfce43bced3241ed12f90e9af0757e23286e95af063"
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.861324 4665 scope.go:117] "RemoveContainer" containerID="ddafcdff0338dbf1ab325b923766f30c929e5ba1e8230d7aa9a0bdbb37148dff"
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.883170 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-logs\") pod \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\" (UID: \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\") "
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.883269 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jd68s\" (UniqueName: \"kubernetes.io/projected/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-kube-api-access-jd68s\") pod \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\" (UID: \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\") "
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.883364 4665 scope.go:117] "RemoveContainer" containerID="2afadf7b77a8d440d719bf7c743e8c5fbd59076ba1ed071234daca7c4f4c02b0"
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.883446 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-config-data\") pod \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\" (UID: \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\") "
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.883502 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-public-tls-certs\") pod \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\" (UID: \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\") "
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.883586 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-internal-tls-certs\") pod \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\" (UID: \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\") "
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.883633 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-combined-ca-bundle\") pod \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\" (UID: \"5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5\") "
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.883761 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-logs" (OuterVolumeSpecName: "logs") pod "5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5" (UID: "5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.884715 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b4996c4c-1b2a-4bd2-b865-9fa197505120-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.884744 4665 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-logs\") on node \"crc\" DevicePath \"\""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.906646 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-kube-api-access-jd68s" (OuterVolumeSpecName: "kube-api-access-jd68s") pod "5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5" (UID: "5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5"). InnerVolumeSpecName "kube-api-access-jd68s". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.924930 4665 scope.go:117] "RemoveContainer" containerID="b64b7e94cf1f5819f5f0f218f4ca44e5936b31fb9c820e0596c9658dcae43a6d"
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.929209 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5" (UID: "5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.942647 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-config-data" (OuterVolumeSpecName: "config-data") pod "5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5" (UID: "5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.952283 4665 scope.go:117] "RemoveContainer" containerID="7f0236bd2db09337b7834647115aba3e11bbf61e8f8cd44d86e8d3d6549b1458"
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.958202 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5" (UID: "5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.960141 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5" (UID: "5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.975974 4665 scope.go:117] "RemoveContainer" containerID="b64b7e94cf1f5819f5f0f218f4ca44e5936b31fb9c820e0596c9658dcae43a6d"
Dec 05 01:35:55 crc kubenswrapper[4665]: E1205 01:35:55.977737 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b64b7e94cf1f5819f5f0f218f4ca44e5936b31fb9c820e0596c9658dcae43a6d\": container with ID starting with b64b7e94cf1f5819f5f0f218f4ca44e5936b31fb9c820e0596c9658dcae43a6d not found: ID does not exist" containerID="b64b7e94cf1f5819f5f0f218f4ca44e5936b31fb9c820e0596c9658dcae43a6d"
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.977771 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b64b7e94cf1f5819f5f0f218f4ca44e5936b31fb9c820e0596c9658dcae43a6d"} err="failed to get container status \"b64b7e94cf1f5819f5f0f218f4ca44e5936b31fb9c820e0596c9658dcae43a6d\": rpc error: code = NotFound desc = could not find container \"b64b7e94cf1f5819f5f0f218f4ca44e5936b31fb9c820e0596c9658dcae43a6d\": container with ID starting with b64b7e94cf1f5819f5f0f218f4ca44e5936b31fb9c820e0596c9658dcae43a6d not found: ID does not exist"
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.977790 4665 scope.go:117] "RemoveContainer" containerID="7f0236bd2db09337b7834647115aba3e11bbf61e8f8cd44d86e8d3d6549b1458"
Dec 05 01:35:55 crc kubenswrapper[4665]: E1205 01:35:55.978068 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f0236bd2db09337b7834647115aba3e11bbf61e8f8cd44d86e8d3d6549b1458\": container with ID starting with 7f0236bd2db09337b7834647115aba3e11bbf61e8f8cd44d86e8d3d6549b1458 not found: ID does not exist" containerID="7f0236bd2db09337b7834647115aba3e11bbf61e8f8cd44d86e8d3d6549b1458"
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.978096 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f0236bd2db09337b7834647115aba3e11bbf61e8f8cd44d86e8d3d6549b1458"} err="failed to get container status \"7f0236bd2db09337b7834647115aba3e11bbf61e8f8cd44d86e8d3d6549b1458\": rpc error: code = NotFound desc = could not find container \"7f0236bd2db09337b7834647115aba3e11bbf61e8f8cd44d86e8d3d6549b1458\": container with ID starting with 7f0236bd2db09337b7834647115aba3e11bbf61e8f8cd44d86e8d3d6549b1458 not found: ID does not exist"
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.978111 4665 scope.go:117] "RemoveContainer" containerID="b64b7e94cf1f5819f5f0f218f4ca44e5936b31fb9c820e0596c9658dcae43a6d"
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.978364 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b64b7e94cf1f5819f5f0f218f4ca44e5936b31fb9c820e0596c9658dcae43a6d"} err="failed to get container status \"b64b7e94cf1f5819f5f0f218f4ca44e5936b31fb9c820e0596c9658dcae43a6d\": rpc error: code = NotFound desc = could not find container \"b64b7e94cf1f5819f5f0f218f4ca44e5936b31fb9c820e0596c9658dcae43a6d\": container with ID starting with b64b7e94cf1f5819f5f0f218f4ca44e5936b31fb9c820e0596c9658dcae43a6d not found: ID does not exist"
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.978390 4665 scope.go:117] "RemoveContainer" containerID="7f0236bd2db09337b7834647115aba3e11bbf61e8f8cd44d86e8d3d6549b1458"
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.978610 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f0236bd2db09337b7834647115aba3e11bbf61e8f8cd44d86e8d3d6549b1458"} err="failed to get container status \"7f0236bd2db09337b7834647115aba3e11bbf61e8f8cd44d86e8d3d6549b1458\": rpc error: code = NotFound desc = could not find container \"7f0236bd2db09337b7834647115aba3e11bbf61e8f8cd44d86e8d3d6549b1458\": container with ID starting with 7f0236bd2db09337b7834647115aba3e11bbf61e8f8cd44d86e8d3d6549b1458 not found: ID does not exist"
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.986187 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.986211 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jd68s\" (UniqueName: \"kubernetes.io/projected/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-kube-api-access-jd68s\") on node \"crc\" DevicePath \"\""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.986222 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.986230 4665 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-public-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 01:35:55 crc kubenswrapper[4665]: I1205 01:35:55.986238 4665 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.147501 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.161360 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.185512 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"]
Dec 05 01:35:56 crc kubenswrapper[4665]: E1205 01:35:56.186044 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4996c4c-1b2a-4bd2-b865-9fa197505120" containerName="ceilometer-central-agent"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.186065 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4996c4c-1b2a-4bd2-b865-9fa197505120" containerName="ceilometer-central-agent"
Dec 05 01:35:56 crc kubenswrapper[4665]: E1205 01:35:56.186089 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44e024d0-ebb1-4a1f-9761-f47b20539a2f" containerName="dnsmasq-dns"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.186098 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="44e024d0-ebb1-4a1f-9761-f47b20539a2f" containerName="dnsmasq-dns"
Dec 05 01:35:56 crc kubenswrapper[4665]: E1205 01:35:56.186109 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5" containerName="nova-api-api"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.186117 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5" containerName="nova-api-api"
Dec 05 01:35:56 crc kubenswrapper[4665]: E1205 01:35:56.186136 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4996c4c-1b2a-4bd2-b865-9fa197505120" containerName="proxy-httpd"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.186143 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4996c4c-1b2a-4bd2-b865-9fa197505120" containerName="proxy-httpd"
Dec 05 01:35:56 crc kubenswrapper[4665]: E1205 01:35:56.186158 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5" containerName="nova-api-log"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.186166 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5" containerName="nova-api-log"
Dec 05 01:35:56 crc kubenswrapper[4665]: E1205 01:35:56.186180 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="109c1163-805a-4399-b593-66f29ff2046c" containerName="nova-manage"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.186187 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="109c1163-805a-4399-b593-66f29ff2046c" containerName="nova-manage"
Dec 05 01:35:56 crc kubenswrapper[4665]: E1205 01:35:56.186198 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4996c4c-1b2a-4bd2-b865-9fa197505120" containerName="ceilometer-notification-agent"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.186207 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4996c4c-1b2a-4bd2-b865-9fa197505120" containerName="ceilometer-notification-agent"
Dec 05 01:35:56 crc kubenswrapper[4665]: E1205 01:35:56.186240 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="44e024d0-ebb1-4a1f-9761-f47b20539a2f" containerName="init"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.186249 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="44e024d0-ebb1-4a1f-9761-f47b20539a2f" containerName="init"
Dec 05 01:35:56 crc kubenswrapper[4665]: E1205 01:35:56.186268 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b4996c4c-1b2a-4bd2-b865-9fa197505120" containerName="sg-core"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.186275 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="b4996c4c-1b2a-4bd2-b865-9fa197505120" containerName="sg-core"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.186494 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="109c1163-805a-4399-b593-66f29ff2046c" containerName="nova-manage"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.186518 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="44e024d0-ebb1-4a1f-9761-f47b20539a2f" containerName="dnsmasq-dns"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.186530 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5" containerName="nova-api-log"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.186539 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4996c4c-1b2a-4bd2-b865-9fa197505120" containerName="proxy-httpd"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.186553 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4996c4c-1b2a-4bd2-b865-9fa197505120" containerName="ceilometer-central-agent"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.186566 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4996c4c-1b2a-4bd2-b865-9fa197505120" containerName="ceilometer-notification-agent"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.186587 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="b4996c4c-1b2a-4bd2-b865-9fa197505120" containerName="sg-core"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.186595 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5" containerName="nova-api-api"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.188755 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.191369 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.191557 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.191576 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.203884 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.255390 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.263673 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.291491 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spnn9\" (UniqueName: \"kubernetes.io/projected/94d0e25f-bdca-4da9-80c5-b81bedbdd7cc-kube-api-access-spnn9\") pod \"ceilometer-0\" (UID: \"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc\") " pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.291561 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94d0e25f-bdca-4da9-80c5-b81bedbdd7cc-config-data\") pod \"ceilometer-0\" (UID: \"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc\") " pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.291644 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/94d0e25f-bdca-4da9-80c5-b81bedbdd7cc-run-httpd\") pod \"ceilometer-0\" (UID: \"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc\") " pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.291738 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/94d0e25f-bdca-4da9-80c5-b81bedbdd7cc-log-httpd\") pod \"ceilometer-0\" (UID: \"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc\") " pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.291800 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/94d0e25f-bdca-4da9-80c5-b81bedbdd7cc-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc\") " pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.292463 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d0e25f-bdca-4da9-80c5-b81bedbdd7cc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc\") " pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.292566 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/94d0e25f-bdca-4da9-80c5-b81bedbdd7cc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc\") " pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.292995 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94d0e25f-bdca-4da9-80c5-b81bedbdd7cc-scripts\") pod \"ceilometer-0\" (UID: \"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc\") " pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.313673 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.317128 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.319768 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.319940 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.320013 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.325020 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.394115 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/661570e0-6f0d-4fd7-8368-b4713af3da59-internal-tls-certs\") pod \"nova-api-0\" (UID: \"661570e0-6f0d-4fd7-8368-b4713af3da59\") " pod="openstack/nova-api-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.394724 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/661570e0-6f0d-4fd7-8368-b4713af3da59-config-data\") pod \"nova-api-0\" (UID: \"661570e0-6f0d-4fd7-8368-b4713af3da59\") " pod="openstack/nova-api-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.394830 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94d0e25f-bdca-4da9-80c5-b81bedbdd7cc-scripts\") pod \"ceilometer-0\" (UID: \"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc\") " pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.394930 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spnn9\" (UniqueName: \"kubernetes.io/projected/94d0e25f-bdca-4da9-80c5-b81bedbdd7cc-kube-api-access-spnn9\") pod \"ceilometer-0\" (UID: \"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc\") " pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.395021 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6rmvn\" (UniqueName: \"kubernetes.io/projected/661570e0-6f0d-4fd7-8368-b4713af3da59-kube-api-access-6rmvn\") pod \"nova-api-0\" (UID: \"661570e0-6f0d-4fd7-8368-b4713af3da59\") " pod="openstack/nova-api-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.395116 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94d0e25f-bdca-4da9-80c5-b81bedbdd7cc-config-data\") pod \"ceilometer-0\" (UID: \"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc\") " pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.395233 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/661570e0-6f0d-4fd7-8368-b4713af3da59-public-tls-certs\") pod \"nova-api-0\" (UID: \"661570e0-6f0d-4fd7-8368-b4713af3da59\") " pod="openstack/nova-api-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.395337 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/94d0e25f-bdca-4da9-80c5-b81bedbdd7cc-run-httpd\") pod \"ceilometer-0\" (UID: \"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc\") " pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.395435 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/94d0e25f-bdca-4da9-80c5-b81bedbdd7cc-log-httpd\") pod \"ceilometer-0\" (UID: \"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc\") " pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.395507 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/94d0e25f-bdca-4da9-80c5-b81bedbdd7cc-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc\") " pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.395596 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d0e25f-bdca-4da9-80c5-b81bedbdd7cc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc\") " pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.395709 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/94d0e25f-bdca-4da9-80c5-b81bedbdd7cc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc\") " pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.395830 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/661570e0-6f0d-4fd7-8368-b4713af3da59-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"661570e0-6f0d-4fd7-8368-b4713af3da59\") " pod="openstack/nova-api-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.395900 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/661570e0-6f0d-4fd7-8368-b4713af3da59-logs\") pod \"nova-api-0\" (UID: \"661570e0-6f0d-4fd7-8368-b4713af3da59\") " pod="openstack/nova-api-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.395900 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/94d0e25f-bdca-4da9-80c5-b81bedbdd7cc-run-httpd\") pod \"ceilometer-0\" (UID: \"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc\") " pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.395938 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/94d0e25f-bdca-4da9-80c5-b81bedbdd7cc-log-httpd\") pod \"ceilometer-0\" (UID: \"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc\") " pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.400316 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/94d0e25f-bdca-4da9-80c5-b81bedbdd7cc-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc\") " pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.400321 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/94d0e25f-bdca-4da9-80c5-b81bedbdd7cc-scripts\") pod \"ceilometer-0\" (UID: \"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc\") " pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.401016 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/94d0e25f-bdca-4da9-80c5-b81bedbdd7cc-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc\") " pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.401207 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/94d0e25f-bdca-4da9-80c5-b81bedbdd7cc-config-data\") pod \"ceilometer-0\" (UID: \"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc\") " pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.406893 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/94d0e25f-bdca-4da9-80c5-b81bedbdd7cc-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc\") " pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.423117 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spnn9\" (UniqueName: \"kubernetes.io/projected/94d0e25f-bdca-4da9-80c5-b81bedbdd7cc-kube-api-access-spnn9\") pod \"ceilometer-0\" (UID: \"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc\") " pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.497921 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/661570e0-6f0d-4fd7-8368-b4713af3da59-config-data\") pod \"nova-api-0\" (UID: \"661570e0-6f0d-4fd7-8368-b4713af3da59\") " pod="openstack/nova-api-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.498259 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6rmvn\" (UniqueName: \"kubernetes.io/projected/661570e0-6f0d-4fd7-8368-b4713af3da59-kube-api-access-6rmvn\") pod \"nova-api-0\" (UID: \"661570e0-6f0d-4fd7-8368-b4713af3da59\") " pod="openstack/nova-api-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.498348 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/661570e0-6f0d-4fd7-8368-b4713af3da59-public-tls-certs\") pod \"nova-api-0\" (UID: \"661570e0-6f0d-4fd7-8368-b4713af3da59\") " pod="openstack/nova-api-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.498489 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/661570e0-6f0d-4fd7-8368-b4713af3da59-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"661570e0-6f0d-4fd7-8368-b4713af3da59\") " pod="openstack/nova-api-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.498511 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/661570e0-6f0d-4fd7-8368-b4713af3da59-logs\") pod \"nova-api-0\" (UID: \"661570e0-6f0d-4fd7-8368-b4713af3da59\") " pod="openstack/nova-api-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.498554 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/661570e0-6f0d-4fd7-8368-b4713af3da59-internal-tls-certs\") pod \"nova-api-0\" (UID: \"661570e0-6f0d-4fd7-8368-b4713af3da59\") " pod="openstack/nova-api-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.499599 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/661570e0-6f0d-4fd7-8368-b4713af3da59-logs\") pod \"nova-api-0\" (UID: \"661570e0-6f0d-4fd7-8368-b4713af3da59\") " pod="openstack/nova-api-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.501904 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/661570e0-6f0d-4fd7-8368-b4713af3da59-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"661570e0-6f0d-4fd7-8368-b4713af3da59\") " pod="openstack/nova-api-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.501930 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/661570e0-6f0d-4fd7-8368-b4713af3da59-internal-tls-certs\") pod \"nova-api-0\" (UID: \"661570e0-6f0d-4fd7-8368-b4713af3da59\") " pod="openstack/nova-api-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.502415 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/661570e0-6f0d-4fd7-8368-b4713af3da59-public-tls-certs\") pod \"nova-api-0\" (UID: \"661570e0-6f0d-4fd7-8368-b4713af3da59\") " pod="openstack/nova-api-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.502949 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/661570e0-6f0d-4fd7-8368-b4713af3da59-config-data\") pod \"nova-api-0\" (UID: \"661570e0-6f0d-4fd7-8368-b4713af3da59\") " pod="openstack/nova-api-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.516443 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6rmvn\" (UniqueName: \"kubernetes.io/projected/661570e0-6f0d-4fd7-8368-b4713af3da59-kube-api-access-6rmvn\") pod \"nova-api-0\" (UID: \"661570e0-6f0d-4fd7-8368-b4713af3da59\") " pod="openstack/nova-api-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.528612 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.632943 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.905196 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5" path="/var/lib/kubelet/pods/5e3bcf3c-5d09-4120-88dc-5a5eafebb5f5/volumes"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.906183 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b4996c4c-1b2a-4bd2-b865-9fa197505120" path="/var/lib/kubelet/pods/b4996c4c-1b2a-4bd2-b865-9fa197505120/volumes"
Dec 05 01:35:56 crc kubenswrapper[4665]: I1205 01:35:56.966451 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 01:35:56 crc kubenswrapper[4665]: W1205 01:35:56.979279 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod94d0e25f_bdca_4da9_80c5_b81bedbdd7cc.slice/crio-b2cc7810f305ccbb4c292be3774d31b6a1108419b9289b016dac264b0d8bac98 WatchSource:0}: Error finding container b2cc7810f305ccbb4c292be3774d31b6a1108419b9289b016dac264b0d8bac98: Status 404 returned error can't find the container with id b2cc7810f305ccbb4c292be3774d31b6a1108419b9289b016dac264b0d8bac98
Dec 05 01:35:57 crc kubenswrapper[4665]: I1205 01:35:57.149648 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 05 01:35:57 crc kubenswrapper[4665]: W1205 01:35:57.167567 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod661570e0_6f0d_4fd7_8368_b4713af3da59.slice/crio-e8180ed4c7e66ad8a108297bde41848b8faa4fb0e16db07f8f4c912f3755736f WatchSource:0}: Error finding container e8180ed4c7e66ad8a108297bde41848b8faa4fb0e16db07f8f4c912f3755736f: Status 404 returned error can't find the container with id e8180ed4c7e66ad8a108297bde41848b8faa4fb0e16db07f8f4c912f3755736f
Dec 05 01:35:57 crc kubenswrapper[4665]: I1205 01:35:57.694855 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 01:35:57 crc kubenswrapper[4665]: I1205 01:35:57.847174 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-96fs6\" (UniqueName: \"kubernetes.io/projected/7bc8f290-c6fe-47d5-b94b-d547a2eeadc1-kube-api-access-96fs6\") pod \"7bc8f290-c6fe-47d5-b94b-d547a2eeadc1\" (UID: \"7bc8f290-c6fe-47d5-b94b-d547a2eeadc1\") " Dec 05 01:35:57 crc kubenswrapper[4665]: I1205 01:35:57.847355 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bc8f290-c6fe-47d5-b94b-d547a2eeadc1-config-data\") pod \"7bc8f290-c6fe-47d5-b94b-d547a2eeadc1\" (UID: \"7bc8f290-c6fe-47d5-b94b-d547a2eeadc1\") " Dec 05 01:35:57 crc kubenswrapper[4665]: I1205 01:35:57.847493 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bc8f290-c6fe-47d5-b94b-d547a2eeadc1-combined-ca-bundle\") pod \"7bc8f290-c6fe-47d5-b94b-d547a2eeadc1\" (UID: \"7bc8f290-c6fe-47d5-b94b-d547a2eeadc1\") " Dec 05 01:35:57 crc kubenswrapper[4665]: I1205 01:35:57.854587 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bc8f290-c6fe-47d5-b94b-d547a2eeadc1-kube-api-access-96fs6" (OuterVolumeSpecName: "kube-api-access-96fs6") pod "7bc8f290-c6fe-47d5-b94b-d547a2eeadc1" (UID: "7bc8f290-c6fe-47d5-b94b-d547a2eeadc1"). InnerVolumeSpecName "kube-api-access-96fs6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:35:57 crc kubenswrapper[4665]: I1205 01:35:57.859168 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc","Type":"ContainerStarted","Data":"52e424cac93149619a65d328dbe03be94c8ce608714200b5cc6ff5f288a6556c"} Dec 05 01:35:57 crc kubenswrapper[4665]: I1205 01:35:57.859219 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc","Type":"ContainerStarted","Data":"b2cc7810f305ccbb4c292be3774d31b6a1108419b9289b016dac264b0d8bac98"} Dec 05 01:35:57 crc kubenswrapper[4665]: I1205 01:35:57.866529 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"661570e0-6f0d-4fd7-8368-b4713af3da59","Type":"ContainerStarted","Data":"53666214478c98f4eb74255e6110b4f9c3a43b3cf9da5ef5dd2408dbff527a0f"} Dec 05 01:35:57 crc kubenswrapper[4665]: I1205 01:35:57.866595 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"661570e0-6f0d-4fd7-8368-b4713af3da59","Type":"ContainerStarted","Data":"7e9956ea5ee01c818b0090e3f2a546412c28c356cf36ec664aa173050fbca992"} Dec 05 01:35:57 crc kubenswrapper[4665]: I1205 01:35:57.866605 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"661570e0-6f0d-4fd7-8368-b4713af3da59","Type":"ContainerStarted","Data":"e8180ed4c7e66ad8a108297bde41848b8faa4fb0e16db07f8f4c912f3755736f"} Dec 05 01:35:57 crc kubenswrapper[4665]: I1205 01:35:57.874500 4665 generic.go:334] "Generic (PLEG): container finished" podID="7bc8f290-c6fe-47d5-b94b-d547a2eeadc1" containerID="4dd73a6067b50ed9b2f4fb1c28ce6adedffdf8d9f663bd7a1bfa80ca1f9864f0" exitCode=0 Dec 05 01:35:57 crc kubenswrapper[4665]: I1205 01:35:57.874567 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" 
event={"ID":"7bc8f290-c6fe-47d5-b94b-d547a2eeadc1","Type":"ContainerDied","Data":"4dd73a6067b50ed9b2f4fb1c28ce6adedffdf8d9f663bd7a1bfa80ca1f9864f0"} Dec 05 01:35:57 crc kubenswrapper[4665]: I1205 01:35:57.874591 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"7bc8f290-c6fe-47d5-b94b-d547a2eeadc1","Type":"ContainerDied","Data":"113d3c927fd1f4547b326532f4b08a9d1abd3bbb984688b51a5f2d9a49c7dc42"} Dec 05 01:35:57 crc kubenswrapper[4665]: I1205 01:35:57.874609 4665 scope.go:117] "RemoveContainer" containerID="4dd73a6067b50ed9b2f4fb1c28ce6adedffdf8d9f663bd7a1bfa80ca1f9864f0" Dec 05 01:35:57 crc kubenswrapper[4665]: I1205 01:35:57.874666 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 01:35:57 crc kubenswrapper[4665]: I1205 01:35:57.880485 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bc8f290-c6fe-47d5-b94b-d547a2eeadc1-config-data" (OuterVolumeSpecName: "config-data") pod "7bc8f290-c6fe-47d5-b94b-d547a2eeadc1" (UID: "7bc8f290-c6fe-47d5-b94b-d547a2eeadc1"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:57 crc kubenswrapper[4665]: I1205 01:35:57.906657 4665 scope.go:117] "RemoveContainer" containerID="4dd73a6067b50ed9b2f4fb1c28ce6adedffdf8d9f663bd7a1bfa80ca1f9864f0" Dec 05 01:35:57 crc kubenswrapper[4665]: E1205 01:35:57.907117 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4dd73a6067b50ed9b2f4fb1c28ce6adedffdf8d9f663bd7a1bfa80ca1f9864f0\": container with ID starting with 4dd73a6067b50ed9b2f4fb1c28ce6adedffdf8d9f663bd7a1bfa80ca1f9864f0 not found: ID does not exist" containerID="4dd73a6067b50ed9b2f4fb1c28ce6adedffdf8d9f663bd7a1bfa80ca1f9864f0" Dec 05 01:35:57 crc kubenswrapper[4665]: I1205 01:35:57.907178 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4dd73a6067b50ed9b2f4fb1c28ce6adedffdf8d9f663bd7a1bfa80ca1f9864f0"} err="failed to get container status \"4dd73a6067b50ed9b2f4fb1c28ce6adedffdf8d9f663bd7a1bfa80ca1f9864f0\": rpc error: code = NotFound desc = could not find container \"4dd73a6067b50ed9b2f4fb1c28ce6adedffdf8d9f663bd7a1bfa80ca1f9864f0\": container with ID starting with 4dd73a6067b50ed9b2f4fb1c28ce6adedffdf8d9f663bd7a1bfa80ca1f9864f0 not found: ID does not exist" Dec 05 01:35:57 crc kubenswrapper[4665]: I1205 01:35:57.910762 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bc8f290-c6fe-47d5-b94b-d547a2eeadc1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7bc8f290-c6fe-47d5-b94b-d547a2eeadc1" (UID: "7bc8f290-c6fe-47d5-b94b-d547a2eeadc1"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:57 crc kubenswrapper[4665]: I1205 01:35:57.913206 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.913188099 podStartE2EDuration="1.913188099s" podCreationTimestamp="2025-12-05 01:35:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:35:57.907483741 +0000 UTC m=+1533.246876040" watchObservedRunningTime="2025-12-05 01:35:57.913188099 +0000 UTC m=+1533.252580408" Dec 05 01:35:57 crc kubenswrapper[4665]: I1205 01:35:57.950306 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-96fs6\" (UniqueName: \"kubernetes.io/projected/7bc8f290-c6fe-47d5-b94b-d547a2eeadc1-kube-api-access-96fs6\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:57 crc kubenswrapper[4665]: I1205 01:35:57.950643 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bc8f290-c6fe-47d5-b94b-d547a2eeadc1-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:57 crc kubenswrapper[4665]: I1205 01:35:57.950722 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7bc8f290-c6fe-47d5-b94b-d547a2eeadc1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:58 crc kubenswrapper[4665]: I1205 01:35:58.319167 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 01:35:58 crc kubenswrapper[4665]: I1205 01:35:58.342917 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 01:35:58 crc kubenswrapper[4665]: I1205 01:35:58.366118 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 01:35:58 crc kubenswrapper[4665]: E1205 01:35:58.366557 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bc8f290-c6fe-47d5-b94b-d547a2eeadc1" containerName="nova-scheduler-scheduler" Dec 05 01:35:58 crc kubenswrapper[4665]: I1205 01:35:58.366576 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bc8f290-c6fe-47d5-b94b-d547a2eeadc1" containerName="nova-scheduler-scheduler" Dec 05 01:35:58 crc kubenswrapper[4665]: I1205 01:35:58.366770 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bc8f290-c6fe-47d5-b94b-d547a2eeadc1" containerName="nova-scheduler-scheduler" Dec 05 01:35:58 crc kubenswrapper[4665]: I1205 01:35:58.367476 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 01:35:58 crc kubenswrapper[4665]: I1205 01:35:58.370542 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 01:35:58 crc kubenswrapper[4665]: I1205 01:35:58.374306 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 01:35:58 crc kubenswrapper[4665]: I1205 01:35:58.459275 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/165ee87a-9845-4f0c-b291-9d5fe6a1bdc5-config-data\") pod \"nova-scheduler-0\" (UID: \"165ee87a-9845-4f0c-b291-9d5fe6a1bdc5\") " pod="openstack/nova-scheduler-0" Dec 05 01:35:58 crc kubenswrapper[4665]: I1205 01:35:58.459438 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/165ee87a-9845-4f0c-b291-9d5fe6a1bdc5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"165ee87a-9845-4f0c-b291-9d5fe6a1bdc5\") " pod="openstack/nova-scheduler-0" Dec 05 01:35:58 crc kubenswrapper[4665]: I1205 01:35:58.459588 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5tsx\" (UniqueName: \"kubernetes.io/projected/165ee87a-9845-4f0c-b291-9d5fe6a1bdc5-kube-api-access-b5tsx\") pod \"nova-scheduler-0\" (UID: \"165ee87a-9845-4f0c-b291-9d5fe6a1bdc5\") " pod="openstack/nova-scheduler-0" Dec 05 01:35:58 crc kubenswrapper[4665]: I1205 01:35:58.562442 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/165ee87a-9845-4f0c-b291-9d5fe6a1bdc5-config-data\") pod \"nova-scheduler-0\" (UID: \"165ee87a-9845-4f0c-b291-9d5fe6a1bdc5\") " pod="openstack/nova-scheduler-0" Dec 05 01:35:58 crc kubenswrapper[4665]: I1205 01:35:58.562531 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/165ee87a-9845-4f0c-b291-9d5fe6a1bdc5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"165ee87a-9845-4f0c-b291-9d5fe6a1bdc5\") " pod="openstack/nova-scheduler-0" Dec 05 01:35:58 crc kubenswrapper[4665]: I1205 01:35:58.562693 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5tsx\" (UniqueName: \"kubernetes.io/projected/165ee87a-9845-4f0c-b291-9d5fe6a1bdc5-kube-api-access-b5tsx\") pod \"nova-scheduler-0\" (UID: \"165ee87a-9845-4f0c-b291-9d5fe6a1bdc5\") " pod="openstack/nova-scheduler-0" Dec 05 01:35:58 crc kubenswrapper[4665]: I1205 01:35:58.572408 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/165ee87a-9845-4f0c-b291-9d5fe6a1bdc5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"165ee87a-9845-4f0c-b291-9d5fe6a1bdc5\") " pod="openstack/nova-scheduler-0" Dec 05 01:35:58 crc kubenswrapper[4665]: I1205 01:35:58.582010 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/165ee87a-9845-4f0c-b291-9d5fe6a1bdc5-config-data\") pod \"nova-scheduler-0\" (UID: \"165ee87a-9845-4f0c-b291-9d5fe6a1bdc5\") " pod="openstack/nova-scheduler-0" Dec 05 01:35:58 crc kubenswrapper[4665]: I1205 01:35:58.599458 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5tsx\" (UniqueName: 
\"kubernetes.io/projected/165ee87a-9845-4f0c-b291-9d5fe6a1bdc5-kube-api-access-b5tsx\") pod \"nova-scheduler-0\" (UID: \"165ee87a-9845-4f0c-b291-9d5fe6a1bdc5\") " pod="openstack/nova-scheduler-0" Dec 05 01:35:58 crc kubenswrapper[4665]: I1205 01:35:58.682072 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 01:35:58 crc kubenswrapper[4665]: I1205 01:35:58.937315 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bc8f290-c6fe-47d5-b94b-d547a2eeadc1" path="/var/lib/kubelet/pods/7bc8f290-c6fe-47d5-b94b-d547a2eeadc1/volumes" Dec 05 01:35:58 crc kubenswrapper[4665]: I1205 01:35:58.938029 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc","Type":"ContainerStarted","Data":"55211bd4e92e6bd787a6af84164e6b7d0e4829f2bb98436e80096ad5b564db79"} Dec 05 01:35:58 crc kubenswrapper[4665]: I1205 01:35:58.940759 4665 generic.go:334] "Generic (PLEG): container finished" podID="7d2986ca-e8d1-4199-ac7b-3ea31432eb9e" containerID="d200baa5912d28a3b0b2bfbe61180d0fa92b71d068b63cd871bdb0547f64cb7d" exitCode=0 Dec 05 01:35:58 crc kubenswrapper[4665]: I1205 01:35:58.940848 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e","Type":"ContainerDied","Data":"d200baa5912d28a3b0b2bfbe61180d0fa92b71d068b63cd871bdb0547f64cb7d"} Dec 05 01:35:59 crc kubenswrapper[4665]: I1205 01:35:59.069579 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 01:35:59 crc kubenswrapper[4665]: I1205 01:35:59.180354 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-nova-metadata-tls-certs\") pod \"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e\" (UID: \"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e\") " Dec 05 01:35:59 crc kubenswrapper[4665]: I1205 01:35:59.180438 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-config-data\") pod \"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e\" (UID: \"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e\") " Dec 05 01:35:59 crc kubenswrapper[4665]: I1205 01:35:59.180512 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nm674\" (UniqueName: \"kubernetes.io/projected/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-kube-api-access-nm674\") pod \"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e\" (UID: \"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e\") " Dec 05 01:35:59 crc kubenswrapper[4665]: I1205 01:35:59.180666 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-combined-ca-bundle\") pod \"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e\" (UID: \"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e\") " Dec 05 01:35:59 crc kubenswrapper[4665]: I1205 01:35:59.180739 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-logs\") pod \"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e\" (UID: \"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e\") " Dec 05 01:35:59 crc kubenswrapper[4665]: I1205 01:35:59.182558 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded 
for volume "kubernetes.io/empty-dir/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-logs" (OuterVolumeSpecName: "logs") pod "7d2986ca-e8d1-4199-ac7b-3ea31432eb9e" (UID: "7d2986ca-e8d1-4199-ac7b-3ea31432eb9e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:35:59 crc kubenswrapper[4665]: I1205 01:35:59.197221 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-kube-api-access-nm674" (OuterVolumeSpecName: "kube-api-access-nm674") pod "7d2986ca-e8d1-4199-ac7b-3ea31432eb9e" (UID: "7d2986ca-e8d1-4199-ac7b-3ea31432eb9e"). InnerVolumeSpecName "kube-api-access-nm674". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:35:59 crc kubenswrapper[4665]: I1205 01:35:59.245574 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-config-data" (OuterVolumeSpecName: "config-data") pod "7d2986ca-e8d1-4199-ac7b-3ea31432eb9e" (UID: "7d2986ca-e8d1-4199-ac7b-3ea31432eb9e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:59 crc kubenswrapper[4665]: I1205 01:35:59.261021 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "7d2986ca-e8d1-4199-ac7b-3ea31432eb9e" (UID: "7d2986ca-e8d1-4199-ac7b-3ea31432eb9e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:59 crc kubenswrapper[4665]: I1205 01:35:59.285233 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:59 crc kubenswrapper[4665]: I1205 01:35:59.285281 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nm674\" (UniqueName: \"kubernetes.io/projected/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-kube-api-access-nm674\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:59 crc kubenswrapper[4665]: I1205 01:35:59.285320 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:59 crc kubenswrapper[4665]: I1205 01:35:59.285331 4665 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-logs\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:59 crc kubenswrapper[4665]: I1205 01:35:59.317825 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "7d2986ca-e8d1-4199-ac7b-3ea31432eb9e" (UID: "7d2986ca-e8d1-4199-ac7b-3ea31432eb9e"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:35:59 crc kubenswrapper[4665]: I1205 01:35:59.387448 4665 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 01:35:59 crc kubenswrapper[4665]: I1205 01:35:59.461522 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 01:35:59 crc kubenswrapper[4665]: W1205 01:35:59.466632 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod165ee87a_9845_4f0c_b291_9d5fe6a1bdc5.slice/crio-ded638f192ede17a24558d1faccd714004b56bf60af4a149e956f832a4609d47 WatchSource:0}: Error finding container ded638f192ede17a24558d1faccd714004b56bf60af4a149e956f832a4609d47: Status 404 returned error can't find the container with id ded638f192ede17a24558d1faccd714004b56bf60af4a149e956f832a4609d47 Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:35:59.998957 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"7d2986ca-e8d1-4199-ac7b-3ea31432eb9e","Type":"ContainerDied","Data":"b6e164a9bdacd7ee5cc443da1139fc14e62dfb9dd6077caed1b393c9627fd623"} Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:35:59.998987 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:35:59.999307 4665 scope.go:117] "RemoveContainer" containerID="d200baa5912d28a3b0b2bfbe61180d0fa92b71d068b63cd871bdb0547f64cb7d" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.000973 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"165ee87a-9845-4f0c-b291-9d5fe6a1bdc5","Type":"ContainerStarted","Data":"c786f9ca34ebaabeec6dbd5385360c85cde3810aa8bd427499f68ba0392908bf"} Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.001010 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"165ee87a-9845-4f0c-b291-9d5fe6a1bdc5","Type":"ContainerStarted","Data":"ded638f192ede17a24558d1faccd714004b56bf60af4a149e956f832a4609d47"} Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.004219 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc","Type":"ContainerStarted","Data":"a19de0451ca1aea5c21219e01bc46cd4def9c32e127e28ffc998559ded683442"} Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.018724 4665 scope.go:117] "RemoveContainer" containerID="0cd27e04f20931086f739ed9c91cba0917f0db4c4aea8c01005071e83405f977" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.032747 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.032730225 podStartE2EDuration="2.032730225s" podCreationTimestamp="2025-12-05 01:35:58 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:36:00.026327139 +0000 UTC m=+1535.365719438" watchObservedRunningTime="2025-12-05 01:36:00.032730225 +0000 UTC m=+1535.372122524" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.052544 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.060892 4665 
kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.076901 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 01:36:00 crc kubenswrapper[4665]: E1205 01:36:00.077584 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d2986ca-e8d1-4199-ac7b-3ea31432eb9e" containerName="nova-metadata-metadata" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.077707 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d2986ca-e8d1-4199-ac7b-3ea31432eb9e" containerName="nova-metadata-metadata" Dec 05 01:36:00 crc kubenswrapper[4665]: E1205 01:36:00.077813 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7d2986ca-e8d1-4199-ac7b-3ea31432eb9e" containerName="nova-metadata-log" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.077891 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="7d2986ca-e8d1-4199-ac7b-3ea31432eb9e" containerName="nova-metadata-log" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.078172 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d2986ca-e8d1-4199-ac7b-3ea31432eb9e" containerName="nova-metadata-metadata" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.078261 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="7d2986ca-e8d1-4199-ac7b-3ea31432eb9e" containerName="nova-metadata-log" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.079634 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.085881 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.090182 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.111062 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.200019 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40f37057-0a9e-40c6-9b67-776cd9b19e54-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"40f37057-0a9e-40c6-9b67-776cd9b19e54\") " pod="openstack/nova-metadata-0" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.200135 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40f37057-0a9e-40c6-9b67-776cd9b19e54-logs\") pod \"nova-metadata-0\" (UID: \"40f37057-0a9e-40c6-9b67-776cd9b19e54\") " pod="openstack/nova-metadata-0" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.200281 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f37057-0a9e-40c6-9b67-776cd9b19e54-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"40f37057-0a9e-40c6-9b67-776cd9b19e54\") " pod="openstack/nova-metadata-0" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.200500 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40f37057-0a9e-40c6-9b67-776cd9b19e54-config-data\") pod 
\"nova-metadata-0\" (UID: \"40f37057-0a9e-40c6-9b67-776cd9b19e54\") " pod="openstack/nova-metadata-0" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.200617 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tr4tj\" (UniqueName: \"kubernetes.io/projected/40f37057-0a9e-40c6-9b67-776cd9b19e54-kube-api-access-tr4tj\") pod \"nova-metadata-0\" (UID: \"40f37057-0a9e-40c6-9b67-776cd9b19e54\") " pod="openstack/nova-metadata-0" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.303029 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f37057-0a9e-40c6-9b67-776cd9b19e54-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"40f37057-0a9e-40c6-9b67-776cd9b19e54\") " pod="openstack/nova-metadata-0" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.303123 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40f37057-0a9e-40c6-9b67-776cd9b19e54-config-data\") pod \"nova-metadata-0\" (UID: \"40f37057-0a9e-40c6-9b67-776cd9b19e54\") " pod="openstack/nova-metadata-0" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.303164 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tr4tj\" (UniqueName: \"kubernetes.io/projected/40f37057-0a9e-40c6-9b67-776cd9b19e54-kube-api-access-tr4tj\") pod \"nova-metadata-0\" (UID: \"40f37057-0a9e-40c6-9b67-776cd9b19e54\") " pod="openstack/nova-metadata-0" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.303214 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40f37057-0a9e-40c6-9b67-776cd9b19e54-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"40f37057-0a9e-40c6-9b67-776cd9b19e54\") " pod="openstack/nova-metadata-0" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.303232 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40f37057-0a9e-40c6-9b67-776cd9b19e54-logs\") pod \"nova-metadata-0\" (UID: \"40f37057-0a9e-40c6-9b67-776cd9b19e54\") " pod="openstack/nova-metadata-0" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.303639 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/40f37057-0a9e-40c6-9b67-776cd9b19e54-logs\") pod \"nova-metadata-0\" (UID: \"40f37057-0a9e-40c6-9b67-776cd9b19e54\") " pod="openstack/nova-metadata-0" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.310676 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/40f37057-0a9e-40c6-9b67-776cd9b19e54-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"40f37057-0a9e-40c6-9b67-776cd9b19e54\") " pod="openstack/nova-metadata-0" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.315153 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40f37057-0a9e-40c6-9b67-776cd9b19e54-config-data\") pod \"nova-metadata-0\" (UID: \"40f37057-0a9e-40c6-9b67-776cd9b19e54\") " pod="openstack/nova-metadata-0" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.320972 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/40f37057-0a9e-40c6-9b67-776cd9b19e54-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"40f37057-0a9e-40c6-9b67-776cd9b19e54\") " pod="openstack/nova-metadata-0" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.322349 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tr4tj\" (UniqueName: \"kubernetes.io/projected/40f37057-0a9e-40c6-9b67-776cd9b19e54-kube-api-access-tr4tj\") pod \"nova-metadata-0\" (UID: \"40f37057-0a9e-40c6-9b67-776cd9b19e54\") " pod="openstack/nova-metadata-0" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.396983 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.903896 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7d2986ca-e8d1-4199-ac7b-3ea31432eb9e" path="/var/lib/kubelet/pods/7d2986ca-e8d1-4199-ac7b-3ea31432eb9e/volumes" Dec 05 01:36:00 crc kubenswrapper[4665]: I1205 01:36:00.919914 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 01:36:01 crc kubenswrapper[4665]: I1205 01:36:01.017131 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"40f37057-0a9e-40c6-9b67-776cd9b19e54","Type":"ContainerStarted","Data":"3318d63c17d0f6dd985a5ee3a473a2e25ebd3b936205184cb04ddbf401f1461c"} Dec 05 01:36:01 crc kubenswrapper[4665]: I1205 01:36:01.035112 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"94d0e25f-bdca-4da9-80c5-b81bedbdd7cc","Type":"ContainerStarted","Data":"debd11bbe2b5d7b4bdc06a419b3851377a4697e8e50ecec8b316e45226ec3520"} Dec 05 01:36:01 crc kubenswrapper[4665]: I1205 01:36:01.035401 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 01:36:01 crc kubenswrapper[4665]: I1205 01:36:01.064502 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=1.276220839 podStartE2EDuration="5.064481535s" podCreationTimestamp="2025-12-05 01:35:56 +0000 UTC" firstStartedPulling="2025-12-05 01:35:56.982418841 +0000 UTC m=+1532.321811140" lastFinishedPulling="2025-12-05 01:36:00.770679537 +0000 UTC m=+1536.110071836" observedRunningTime="2025-12-05 01:36:01.062459225 +0000 UTC m=+1536.401851544" watchObservedRunningTime="2025-12-05 01:36:01.064481535 +0000 UTC m=+1536.403873834" Dec 05 01:36:02 crc kubenswrapper[4665]: I1205 01:36:02.055670 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"40f37057-0a9e-40c6-9b67-776cd9b19e54","Type":"ContainerStarted","Data":"d192abacf94af81a5d5187baef7395e2ba735a4eb5f4337ad1213c70150cc58a"} Dec 05 01:36:02 crc kubenswrapper[4665]: I1205 01:36:02.056205 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"40f37057-0a9e-40c6-9b67-776cd9b19e54","Type":"ContainerStarted","Data":"84013430da2cd61fc19faadaeee67946c25b464a7685ec03dd9699e7806c8b49"} Dec 05 01:36:02 crc kubenswrapper[4665]: I1205 01:36:02.080163 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.080144375 podStartE2EDuration="2.080144375s" podCreationTimestamp="2025-12-05 01:36:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:36:02.076962058 +0000 
UTC m=+1537.416354357" watchObservedRunningTime="2025-12-05 01:36:02.080144375 +0000 UTC m=+1537.419536674" Dec 05 01:36:03 crc kubenswrapper[4665]: I1205 01:36:03.682658 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 01:36:05 crc kubenswrapper[4665]: I1205 01:36:05.397518 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 01:36:05 crc kubenswrapper[4665]: I1205 01:36:05.397568 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 01:36:06 crc kubenswrapper[4665]: I1205 01:36:06.634817 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 01:36:06 crc kubenswrapper[4665]: I1205 01:36:06.636036 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 01:36:07 crc kubenswrapper[4665]: I1205 01:36:07.652874 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="661570e0-6f0d-4fd7-8368-b4713af3da59" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.207:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 01:36:07 crc kubenswrapper[4665]: I1205 01:36:07.653570 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="661570e0-6f0d-4fd7-8368-b4713af3da59" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.207:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 01:36:08 crc kubenswrapper[4665]: I1205 01:36:08.683192 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 01:36:08 crc kubenswrapper[4665]: I1205 01:36:08.717954 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 01:36:09 crc kubenswrapper[4665]: I1205 01:36:09.187880 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 01:36:10 crc kubenswrapper[4665]: I1205 01:36:10.397857 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 01:36:10 crc kubenswrapper[4665]: I1205 01:36:10.398520 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 01:36:11 crc kubenswrapper[4665]: I1205 01:36:11.410605 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="40f37057-0a9e-40c6-9b67-776cd9b19e54" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.209:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 01:36:11 crc kubenswrapper[4665]: I1205 01:36:11.410653 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="40f37057-0a9e-40c6-9b67-776cd9b19e54" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.209:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 01:36:16 crc kubenswrapper[4665]: I1205 01:36:16.641777 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 01:36:16 crc kubenswrapper[4665]: I1205 01:36:16.642620 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openstack/nova-api-0" Dec 05 01:36:16 crc kubenswrapper[4665]: I1205 01:36:16.648383 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 01:36:16 crc kubenswrapper[4665]: I1205 01:36:16.649270 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0" Dec 05 01:36:17 crc kubenswrapper[4665]: I1205 01:36:17.251868 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0" Dec 05 01:36:17 crc kubenswrapper[4665]: I1205 01:36:17.259594 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0" Dec 05 01:36:20 crc kubenswrapper[4665]: I1205 01:36:20.407627 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 01:36:20 crc kubenswrapper[4665]: I1205 01:36:20.409495 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 01:36:20 crc kubenswrapper[4665]: I1205 01:36:20.416150 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 01:36:21 crc kubenswrapper[4665]: I1205 01:36:21.296803 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 01:36:26 crc kubenswrapper[4665]: I1205 01:36:26.537953 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0" Dec 05 01:36:38 crc kubenswrapper[4665]: I1205 01:36:38.138217 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 01:36:39 crc kubenswrapper[4665]: I1205 01:36:39.669071 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 01:36:42 crc kubenswrapper[4665]: I1205 01:36:42.580126 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="753728b2-97f7-4b79-8daf-19e01260d537" containerName="rabbitmq" containerID="cri-o://c23f9c9b6e782326f69fb576fa5502c1aba5fd30370fbdb03269d7d70a308724" gracePeriod=604796 Dec 05 01:36:43 crc kubenswrapper[4665]: I1205 01:36:43.973649 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="82ad13d8-7710-4135-9822-a96d62650e6d" containerName="rabbitmq" containerID="cri-o://68eb92747eb5b9149271000ef0f9b6387b4fd5c2938c46feaf67f6a0ceae9691" gracePeriod=604796 Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.090131 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="82ad13d8-7710-4135-9822-a96d62650e6d" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.99:5671: connect: connection refused" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.127643 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.227271 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/753728b2-97f7-4b79-8daf-19e01260d537-config-data\") pod \"753728b2-97f7-4b79-8daf-19e01260d537\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.227379 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/753728b2-97f7-4b79-8daf-19e01260d537-rabbitmq-confd\") pod \"753728b2-97f7-4b79-8daf-19e01260d537\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.227403 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/753728b2-97f7-4b79-8daf-19e01260d537-plugins-conf\") pod \"753728b2-97f7-4b79-8daf-19e01260d537\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.227466 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/753728b2-97f7-4b79-8daf-19e01260d537-server-conf\") pod \"753728b2-97f7-4b79-8daf-19e01260d537\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.227502 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/753728b2-97f7-4b79-8daf-19e01260d537-rabbitmq-erlang-cookie\") pod \"753728b2-97f7-4b79-8daf-19e01260d537\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.227557 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/753728b2-97f7-4b79-8daf-19e01260d537-rabbitmq-tls\") pod \"753728b2-97f7-4b79-8daf-19e01260d537\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.227595 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/753728b2-97f7-4b79-8daf-19e01260d537-erlang-cookie-secret\") pod \"753728b2-97f7-4b79-8daf-19e01260d537\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.227683 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/753728b2-97f7-4b79-8daf-19e01260d537-pod-info\") pod \"753728b2-97f7-4b79-8daf-19e01260d537\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.227706 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"753728b2-97f7-4b79-8daf-19e01260d537\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.227725 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p4s4v\" (UniqueName: \"kubernetes.io/projected/753728b2-97f7-4b79-8daf-19e01260d537-kube-api-access-p4s4v\") pod \"753728b2-97f7-4b79-8daf-19e01260d537\" (UID: 
\"753728b2-97f7-4b79-8daf-19e01260d537\") " Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.227755 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/753728b2-97f7-4b79-8daf-19e01260d537-rabbitmq-plugins\") pod \"753728b2-97f7-4b79-8daf-19e01260d537\" (UID: \"753728b2-97f7-4b79-8daf-19e01260d537\") " Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.227964 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/753728b2-97f7-4b79-8daf-19e01260d537-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "753728b2-97f7-4b79-8daf-19e01260d537" (UID: "753728b2-97f7-4b79-8daf-19e01260d537"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.228145 4665 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/753728b2-97f7-4b79-8daf-19e01260d537-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.231757 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/753728b2-97f7-4b79-8daf-19e01260d537-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "753728b2-97f7-4b79-8daf-19e01260d537" (UID: "753728b2-97f7-4b79-8daf-19e01260d537"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.232266 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/753728b2-97f7-4b79-8daf-19e01260d537-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "753728b2-97f7-4b79-8daf-19e01260d537" (UID: "753728b2-97f7-4b79-8daf-19e01260d537"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.244866 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/753728b2-97f7-4b79-8daf-19e01260d537-pod-info" (OuterVolumeSpecName: "pod-info") pod "753728b2-97f7-4b79-8daf-19e01260d537" (UID: "753728b2-97f7-4b79-8daf-19e01260d537"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.254575 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/753728b2-97f7-4b79-8daf-19e01260d537-kube-api-access-p4s4v" (OuterVolumeSpecName: "kube-api-access-p4s4v") pod "753728b2-97f7-4b79-8daf-19e01260d537" (UID: "753728b2-97f7-4b79-8daf-19e01260d537"). InnerVolumeSpecName "kube-api-access-p4s4v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.275275 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/753728b2-97f7-4b79-8daf-19e01260d537-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "753728b2-97f7-4b79-8daf-19e01260d537" (UID: "753728b2-97f7-4b79-8daf-19e01260d537"). InnerVolumeSpecName "erlang-cookie-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.275333 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/753728b2-97f7-4b79-8daf-19e01260d537-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "753728b2-97f7-4b79-8daf-19e01260d537" (UID: "753728b2-97f7-4b79-8daf-19e01260d537"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.275371 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "persistence") pod "753728b2-97f7-4b79-8daf-19e01260d537" (UID: "753728b2-97f7-4b79-8daf-19e01260d537"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.299009 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/753728b2-97f7-4b79-8daf-19e01260d537-config-data" (OuterVolumeSpecName: "config-data") pod "753728b2-97f7-4b79-8daf-19e01260d537" (UID: "753728b2-97f7-4b79-8daf-19e01260d537"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.329517 4665 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/753728b2-97f7-4b79-8daf-19e01260d537-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.329545 4665 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/753728b2-97f7-4b79-8daf-19e01260d537-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.329556 4665 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/753728b2-97f7-4b79-8daf-19e01260d537-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.329564 4665 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/753728b2-97f7-4b79-8daf-19e01260d537-pod-info\") on node \"crc\" DevicePath \"\"" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.329591 4665 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.329600 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p4s4v\" (UniqueName: \"kubernetes.io/projected/753728b2-97f7-4b79-8daf-19e01260d537-kube-api-access-p4s4v\") on node \"crc\" DevicePath \"\"" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.329609 4665 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/753728b2-97f7-4b79-8daf-19e01260d537-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.329618 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/753728b2-97f7-4b79-8daf-19e01260d537-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 
01:36:49.343688 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/753728b2-97f7-4b79-8daf-19e01260d537-server-conf" (OuterVolumeSpecName: "server-conf") pod "753728b2-97f7-4b79-8daf-19e01260d537" (UID: "753728b2-97f7-4b79-8daf-19e01260d537"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.383469 4665 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.419002 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/753728b2-97f7-4b79-8daf-19e01260d537-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "753728b2-97f7-4b79-8daf-19e01260d537" (UID: "753728b2-97f7-4b79-8daf-19e01260d537"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.431189 4665 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.431235 4665 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/753728b2-97f7-4b79-8daf-19e01260d537-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.431246 4665 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/753728b2-97f7-4b79-8daf-19e01260d537-server-conf\") on node \"crc\" DevicePath \"\"" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.550390 4665 generic.go:334] "Generic (PLEG): container finished" podID="753728b2-97f7-4b79-8daf-19e01260d537" containerID="c23f9c9b6e782326f69fb576fa5502c1aba5fd30370fbdb03269d7d70a308724" exitCode=0 Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.550434 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"753728b2-97f7-4b79-8daf-19e01260d537","Type":"ContainerDied","Data":"c23f9c9b6e782326f69fb576fa5502c1aba5fd30370fbdb03269d7d70a308724"} Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.550459 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"753728b2-97f7-4b79-8daf-19e01260d537","Type":"ContainerDied","Data":"e5816000a33b0f5880cccc9f1eff44840e38e704e90e4b2bc6c9d98309229bcc"} Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.550460 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.550475 4665 scope.go:117] "RemoveContainer" containerID="c23f9c9b6e782326f69fb576fa5502c1aba5fd30370fbdb03269d7d70a308724" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.606532 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.609235 4665 scope.go:117] "RemoveContainer" containerID="ad6c34d45f2e5957d56a68ee1781bf4ac2c85caaf4bff585d01373807dfb5bed" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.615515 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.643811 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 01:36:49 crc kubenswrapper[4665]: E1205 01:36:49.644333 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="753728b2-97f7-4b79-8daf-19e01260d537" containerName="rabbitmq" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.644357 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="753728b2-97f7-4b79-8daf-19e01260d537" containerName="rabbitmq" Dec 05 01:36:49 crc kubenswrapper[4665]: E1205 01:36:49.644383 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="753728b2-97f7-4b79-8daf-19e01260d537" containerName="setup-container" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.644391 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="753728b2-97f7-4b79-8daf-19e01260d537" containerName="setup-container" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.644628 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="753728b2-97f7-4b79-8daf-19e01260d537" containerName="rabbitmq" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.645793 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.659554 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.659780 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.661743 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.661910 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.661945 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.662088 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.662238 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-9fcvg" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.672442 4665 scope.go:117] "RemoveContainer" containerID="c23f9c9b6e782326f69fb576fa5502c1aba5fd30370fbdb03269d7d70a308724" Dec 05 01:36:49 crc kubenswrapper[4665]: E1205 01:36:49.674255 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c23f9c9b6e782326f69fb576fa5502c1aba5fd30370fbdb03269d7d70a308724\": container with ID starting with c23f9c9b6e782326f69fb576fa5502c1aba5fd30370fbdb03269d7d70a308724 not found: ID does not exist" containerID="c23f9c9b6e782326f69fb576fa5502c1aba5fd30370fbdb03269d7d70a308724" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.674347 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c23f9c9b6e782326f69fb576fa5502c1aba5fd30370fbdb03269d7d70a308724"} err="failed to get container status \"c23f9c9b6e782326f69fb576fa5502c1aba5fd30370fbdb03269d7d70a308724\": rpc error: code = NotFound desc = could not find container \"c23f9c9b6e782326f69fb576fa5502c1aba5fd30370fbdb03269d7d70a308724\": container with ID starting with c23f9c9b6e782326f69fb576fa5502c1aba5fd30370fbdb03269d7d70a308724 not found: ID does not exist" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.674388 4665 scope.go:117] "RemoveContainer" containerID="ad6c34d45f2e5957d56a68ee1781bf4ac2c85caaf4bff585d01373807dfb5bed" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.674510 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 01:36:49 crc kubenswrapper[4665]: E1205 01:36:49.674740 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ad6c34d45f2e5957d56a68ee1781bf4ac2c85caaf4bff585d01373807dfb5bed\": container with ID starting with ad6c34d45f2e5957d56a68ee1781bf4ac2c85caaf4bff585d01373807dfb5bed not found: ID does not exist" containerID="ad6c34d45f2e5957d56a68ee1781bf4ac2c85caaf4bff585d01373807dfb5bed" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.674771 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ad6c34d45f2e5957d56a68ee1781bf4ac2c85caaf4bff585d01373807dfb5bed"} err="failed to get container status 
\"ad6c34d45f2e5957d56a68ee1781bf4ac2c85caaf4bff585d01373807dfb5bed\": rpc error: code = NotFound desc = could not find container \"ad6c34d45f2e5957d56a68ee1781bf4ac2c85caaf4bff585d01373807dfb5bed\": container with ID starting with ad6c34d45f2e5957d56a68ee1781bf4ac2c85caaf4bff585d01373807dfb5bed not found: ID does not exist" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.836808 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7a8135c5-ab50-4a2b-895a-7976da8b5bee-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.836849 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fz76t\" (UniqueName: \"kubernetes.io/projected/7a8135c5-ab50-4a2b-895a-7976da8b5bee-kube-api-access-fz76t\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.836967 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7a8135c5-ab50-4a2b-895a-7976da8b5bee-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.837066 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7a8135c5-ab50-4a2b-895a-7976da8b5bee-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.837112 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7a8135c5-ab50-4a2b-895a-7976da8b5bee-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.837187 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.837221 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7a8135c5-ab50-4a2b-895a-7976da8b5bee-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.837241 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7a8135c5-ab50-4a2b-895a-7976da8b5bee-config-data\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.837384 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7a8135c5-ab50-4a2b-895a-7976da8b5bee-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.837426 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7a8135c5-ab50-4a2b-895a-7976da8b5bee-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.837450 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7a8135c5-ab50-4a2b-895a-7976da8b5bee-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.938769 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.938815 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7a8135c5-ab50-4a2b-895a-7976da8b5bee-config-data\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.938834 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7a8135c5-ab50-4a2b-895a-7976da8b5bee-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.938890 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7a8135c5-ab50-4a2b-895a-7976da8b5bee-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.938912 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7a8135c5-ab50-4a2b-895a-7976da8b5bee-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.938934 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7a8135c5-ab50-4a2b-895a-7976da8b5bee-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.938958 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7a8135c5-ab50-4a2b-895a-7976da8b5bee-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " 
pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.938983 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fz76t\" (UniqueName: \"kubernetes.io/projected/7a8135c5-ab50-4a2b-895a-7976da8b5bee-kube-api-access-fz76t\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.939043 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7a8135c5-ab50-4a2b-895a-7976da8b5bee-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.939105 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7a8135c5-ab50-4a2b-895a-7976da8b5bee-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.939109 4665 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") device mount path \"/mnt/openstack/pv01\"" pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.939783 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/7a8135c5-ab50-4a2b-895a-7976da8b5bee-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.939816 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/7a8135c5-ab50-4a2b-895a-7976da8b5bee-config-data\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.940262 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/7a8135c5-ab50-4a2b-895a-7976da8b5bee-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.940342 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/7a8135c5-ab50-4a2b-895a-7976da8b5bee-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.940415 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/7a8135c5-ab50-4a2b-895a-7976da8b5bee-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.941215 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: 
\"kubernetes.io/configmap/7a8135c5-ab50-4a2b-895a-7976da8b5bee-server-conf\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.944373 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/7a8135c5-ab50-4a2b-895a-7976da8b5bee-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.944373 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/7a8135c5-ab50-4a2b-895a-7976da8b5bee-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.944626 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/7a8135c5-ab50-4a2b-895a-7976da8b5bee-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.944798 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/7a8135c5-ab50-4a2b-895a-7976da8b5bee-pod-info\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.966956 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fz76t\" (UniqueName: \"kubernetes.io/projected/7a8135c5-ab50-4a2b-895a-7976da8b5bee-kube-api-access-fz76t\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.984228 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"rabbitmq-server-0\" (UID: \"7a8135c5-ab50-4a2b-895a-7976da8b5bee\") " pod="openstack/rabbitmq-server-0" Dec 05 01:36:49 crc kubenswrapper[4665]: I1205 01:36:49.993818 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.529652 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.555022 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.560659 4665 generic.go:334] "Generic (PLEG): container finished" podID="82ad13d8-7710-4135-9822-a96d62650e6d" containerID="68eb92747eb5b9149271000ef0f9b6387b4fd5c2938c46feaf67f6a0ceae9691" exitCode=0 Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.560711 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.560750 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"82ad13d8-7710-4135-9822-a96d62650e6d","Type":"ContainerDied","Data":"68eb92747eb5b9149271000ef0f9b6387b4fd5c2938c46feaf67f6a0ceae9691"} Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.560791 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"82ad13d8-7710-4135-9822-a96d62650e6d","Type":"ContainerDied","Data":"92f2fcaf28e141f0c19b52536ccfbf6fa53653202bdc54ea4b7fd5c6c7d13d99"} Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.560808 4665 scope.go:117] "RemoveContainer" containerID="68eb92747eb5b9149271000ef0f9b6387b4fd5c2938c46feaf67f6a0ceae9691" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.563221 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/82ad13d8-7710-4135-9822-a96d62650e6d-rabbitmq-tls\") pod \"82ad13d8-7710-4135-9822-a96d62650e6d\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.563259 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/82ad13d8-7710-4135-9822-a96d62650e6d-rabbitmq-confd\") pod \"82ad13d8-7710-4135-9822-a96d62650e6d\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.563281 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/82ad13d8-7710-4135-9822-a96d62650e6d-server-conf\") pod \"82ad13d8-7710-4135-9822-a96d62650e6d\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.563650 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f5j4s\" (UniqueName: \"kubernetes.io/projected/82ad13d8-7710-4135-9822-a96d62650e6d-kube-api-access-f5j4s\") pod \"82ad13d8-7710-4135-9822-a96d62650e6d\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.563724 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/82ad13d8-7710-4135-9822-a96d62650e6d-erlang-cookie-secret\") pod \"82ad13d8-7710-4135-9822-a96d62650e6d\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.563751 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/82ad13d8-7710-4135-9822-a96d62650e6d-pod-info\") pod \"82ad13d8-7710-4135-9822-a96d62650e6d\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.563774 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/82ad13d8-7710-4135-9822-a96d62650e6d-plugins-conf\") pod \"82ad13d8-7710-4135-9822-a96d62650e6d\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.563834 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/82ad13d8-7710-4135-9822-a96d62650e6d-rabbitmq-erlang-cookie\") pod \"82ad13d8-7710-4135-9822-a96d62650e6d\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.563872 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/82ad13d8-7710-4135-9822-a96d62650e6d-rabbitmq-plugins\") pod \"82ad13d8-7710-4135-9822-a96d62650e6d\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.563895 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"82ad13d8-7710-4135-9822-a96d62650e6d\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.563950 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/82ad13d8-7710-4135-9822-a96d62650e6d-config-data\") pod \"82ad13d8-7710-4135-9822-a96d62650e6d\" (UID: \"82ad13d8-7710-4135-9822-a96d62650e6d\") " Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.564852 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82ad13d8-7710-4135-9822-a96d62650e6d-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "82ad13d8-7710-4135-9822-a96d62650e6d" (UID: "82ad13d8-7710-4135-9822-a96d62650e6d"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.564980 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82ad13d8-7710-4135-9822-a96d62650e6d-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "82ad13d8-7710-4135-9822-a96d62650e6d" (UID: "82ad13d8-7710-4135-9822-a96d62650e6d"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.565235 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/82ad13d8-7710-4135-9822-a96d62650e6d-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "82ad13d8-7710-4135-9822-a96d62650e6d" (UID: "82ad13d8-7710-4135-9822-a96d62650e6d"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.588718 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82ad13d8-7710-4135-9822-a96d62650e6d-kube-api-access-f5j4s" (OuterVolumeSpecName: "kube-api-access-f5j4s") pod "82ad13d8-7710-4135-9822-a96d62650e6d" (UID: "82ad13d8-7710-4135-9822-a96d62650e6d"). InnerVolumeSpecName "kube-api-access-f5j4s". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.592606 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7a8135c5-ab50-4a2b-895a-7976da8b5bee","Type":"ContainerStarted","Data":"1415c833eb70680c725312fe5bee7e0f9cf3718e4a2243e6da2f8858ff204f7c"} Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.608503 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage12-crc" (OuterVolumeSpecName: "persistence") pod "82ad13d8-7710-4135-9822-a96d62650e6d" (UID: "82ad13d8-7710-4135-9822-a96d62650e6d"). InnerVolumeSpecName "local-storage12-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.613029 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/82ad13d8-7710-4135-9822-a96d62650e6d-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "82ad13d8-7710-4135-9822-a96d62650e6d" (UID: "82ad13d8-7710-4135-9822-a96d62650e6d"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.634597 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/82ad13d8-7710-4135-9822-a96d62650e6d-pod-info" (OuterVolumeSpecName: "pod-info") pod "82ad13d8-7710-4135-9822-a96d62650e6d" (UID: "82ad13d8-7710-4135-9822-a96d62650e6d"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.636907 4665 scope.go:117] "RemoveContainer" containerID="21680eb362cd8e9adf0a46190a2a0086a2499a8f49cd82521b6d1f78e0cba993" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.647212 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82ad13d8-7710-4135-9822-a96d62650e6d-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "82ad13d8-7710-4135-9822-a96d62650e6d" (UID: "82ad13d8-7710-4135-9822-a96d62650e6d"). InnerVolumeSpecName "rabbitmq-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.670314 4665 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/82ad13d8-7710-4135-9822-a96d62650e6d-rabbitmq-tls\") on node \"crc\" DevicePath \"\"" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.670345 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f5j4s\" (UniqueName: \"kubernetes.io/projected/82ad13d8-7710-4135-9822-a96d62650e6d-kube-api-access-f5j4s\") on node \"crc\" DevicePath \"\"" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.670359 4665 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/82ad13d8-7710-4135-9822-a96d62650e6d-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.670373 4665 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/82ad13d8-7710-4135-9822-a96d62650e6d-pod-info\") on node \"crc\" DevicePath \"\"" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.670382 4665 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/82ad13d8-7710-4135-9822-a96d62650e6d-plugins-conf\") on node \"crc\" DevicePath \"\"" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.670390 4665 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/82ad13d8-7710-4135-9822-a96d62650e6d-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.670400 4665 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/82ad13d8-7710-4135-9822-a96d62650e6d-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.670428 4665 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" " Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.670661 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82ad13d8-7710-4135-9822-a96d62650e6d-config-data" (OuterVolumeSpecName: "config-data") pod "82ad13d8-7710-4135-9822-a96d62650e6d" (UID: "82ad13d8-7710-4135-9822-a96d62650e6d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.696260 4665 scope.go:117] "RemoveContainer" containerID="68eb92747eb5b9149271000ef0f9b6387b4fd5c2938c46feaf67f6a0ceae9691" Dec 05 01:36:50 crc kubenswrapper[4665]: E1205 01:36:50.697253 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68eb92747eb5b9149271000ef0f9b6387b4fd5c2938c46feaf67f6a0ceae9691\": container with ID starting with 68eb92747eb5b9149271000ef0f9b6387b4fd5c2938c46feaf67f6a0ceae9691 not found: ID does not exist" containerID="68eb92747eb5b9149271000ef0f9b6387b4fd5c2938c46feaf67f6a0ceae9691" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.697315 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68eb92747eb5b9149271000ef0f9b6387b4fd5c2938c46feaf67f6a0ceae9691"} err="failed to get container status \"68eb92747eb5b9149271000ef0f9b6387b4fd5c2938c46feaf67f6a0ceae9691\": rpc error: code = NotFound desc = could not find container \"68eb92747eb5b9149271000ef0f9b6387b4fd5c2938c46feaf67f6a0ceae9691\": container with ID starting with 68eb92747eb5b9149271000ef0f9b6387b4fd5c2938c46feaf67f6a0ceae9691 not found: ID does not exist" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.697341 4665 scope.go:117] "RemoveContainer" containerID="21680eb362cd8e9adf0a46190a2a0086a2499a8f49cd82521b6d1f78e0cba993" Dec 05 01:36:50 crc kubenswrapper[4665]: E1205 01:36:50.697828 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21680eb362cd8e9adf0a46190a2a0086a2499a8f49cd82521b6d1f78e0cba993\": container with ID starting with 21680eb362cd8e9adf0a46190a2a0086a2499a8f49cd82521b6d1f78e0cba993 not found: ID does not exist" containerID="21680eb362cd8e9adf0a46190a2a0086a2499a8f49cd82521b6d1f78e0cba993" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.698071 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21680eb362cd8e9adf0a46190a2a0086a2499a8f49cd82521b6d1f78e0cba993"} err="failed to get container status \"21680eb362cd8e9adf0a46190a2a0086a2499a8f49cd82521b6d1f78e0cba993\": rpc error: code = NotFound desc = could not find container \"21680eb362cd8e9adf0a46190a2a0086a2499a8f49cd82521b6d1f78e0cba993\": container with ID starting with 21680eb362cd8e9adf0a46190a2a0086a2499a8f49cd82521b6d1f78e0cba993 not found: ID does not exist" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.737532 4665 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage12-crc" (UniqueName: "kubernetes.io/local-volume/local-storage12-crc") on node "crc" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.752589 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/82ad13d8-7710-4135-9822-a96d62650e6d-server-conf" (OuterVolumeSpecName: "server-conf") pod "82ad13d8-7710-4135-9822-a96d62650e6d" (UID: "82ad13d8-7710-4135-9822-a96d62650e6d"). InnerVolumeSpecName "server-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.772916 4665 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/82ad13d8-7710-4135-9822-a96d62650e6d-server-conf\") on node \"crc\" DevicePath \"\"" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.772967 4665 reconciler_common.go:293] "Volume detached for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") on node \"crc\" DevicePath \"\"" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.772978 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/82ad13d8-7710-4135-9822-a96d62650e6d-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.785417 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/82ad13d8-7710-4135-9822-a96d62650e6d-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "82ad13d8-7710-4135-9822-a96d62650e6d" (UID: "82ad13d8-7710-4135-9822-a96d62650e6d"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.874320 4665 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/82ad13d8-7710-4135-9822-a96d62650e6d-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.906208 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="753728b2-97f7-4b79-8daf-19e01260d537" path="/var/lib/kubelet/pods/753728b2-97f7-4b79-8daf-19e01260d537/volumes" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.906820 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.906849 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.927822 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 01:36:50 crc kubenswrapper[4665]: E1205 01:36:50.928205 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82ad13d8-7710-4135-9822-a96d62650e6d" containerName="setup-container" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.928220 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="82ad13d8-7710-4135-9822-a96d62650e6d" containerName="setup-container" Dec 05 01:36:50 crc kubenswrapper[4665]: E1205 01:36:50.928246 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="82ad13d8-7710-4135-9822-a96d62650e6d" containerName="rabbitmq" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.928252 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="82ad13d8-7710-4135-9822-a96d62650e6d" containerName="rabbitmq" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.928433 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="82ad13d8-7710-4135-9822-a96d62650e6d" containerName="rabbitmq" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.929397 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.933721 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-mfvvg" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.933797 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.933894 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.933974 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.934020 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.934096 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.941235 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 05 01:36:50 crc kubenswrapper[4665]: I1205 01:36:50.946649 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.087311 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b5dc1946-d77e-4106-9350-326f32a2ae55-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.087641 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b5dc1946-d77e-4106-9350-326f32a2ae55-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.087671 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.087719 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b5dc1946-d77e-4106-9350-326f32a2ae55-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.087750 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsqgn\" (UniqueName: \"kubernetes.io/projected/b5dc1946-d77e-4106-9350-326f32a2ae55-kube-api-access-fsqgn\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.087768 4665 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b5dc1946-d77e-4106-9350-326f32a2ae55-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.087804 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b5dc1946-d77e-4106-9350-326f32a2ae55-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.087834 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b5dc1946-d77e-4106-9350-326f32a2ae55-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.087872 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b5dc1946-d77e-4106-9350-326f32a2ae55-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.087904 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b5dc1946-d77e-4106-9350-326f32a2ae55-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.087918 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b5dc1946-d77e-4106-9350-326f32a2ae55-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.189642 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b5dc1946-d77e-4106-9350-326f32a2ae55-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.189707 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b5dc1946-d77e-4106-9350-326f32a2ae55-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.189756 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b5dc1946-d77e-4106-9350-326f32a2ae55-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.189782 4665 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b5dc1946-d77e-4106-9350-326f32a2ae55-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.189797 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b5dc1946-d77e-4106-9350-326f32a2ae55-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.189834 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b5dc1946-d77e-4106-9350-326f32a2ae55-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.189864 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b5dc1946-d77e-4106-9350-326f32a2ae55-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.189893 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.189927 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b5dc1946-d77e-4106-9350-326f32a2ae55-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.189951 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsqgn\" (UniqueName: \"kubernetes.io/projected/b5dc1946-d77e-4106-9350-326f32a2ae55-kube-api-access-fsqgn\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.189966 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b5dc1946-d77e-4106-9350-326f32a2ae55-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.190176 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/b5dc1946-d77e-4106-9350-326f32a2ae55-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.190673 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/b5dc1946-d77e-4106-9350-326f32a2ae55-plugins-conf\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.191399 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/b5dc1946-d77e-4106-9350-326f32a2ae55-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.191623 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/b5dc1946-d77e-4106-9350-326f32a2ae55-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.191628 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/b5dc1946-d77e-4106-9350-326f32a2ae55-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.191756 4665 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") device mount path \"/mnt/openstack/pv12\"" pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.194044 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/b5dc1946-d77e-4106-9350-326f32a2ae55-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.194769 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/b5dc1946-d77e-4106-9350-326f32a2ae55-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.195501 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/b5dc1946-d77e-4106-9350-326f32a2ae55-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.199225 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/b5dc1946-d77e-4106-9350-326f32a2ae55-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.214446 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsqgn\" (UniqueName: \"kubernetes.io/projected/b5dc1946-d77e-4106-9350-326f32a2ae55-kube-api-access-fsqgn\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.222898 4665 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage12-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage12-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"b5dc1946-d77e-4106-9350-326f32a2ae55\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.248214 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:36:51 crc kubenswrapper[4665]: I1205 01:36:51.753999 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 01:36:51 crc kubenswrapper[4665]: W1205 01:36:51.764891 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb5dc1946_d77e_4106_9350_326f32a2ae55.slice/crio-777ec5dee5adcf819d9fd068ecbeb655094e78c04227fd1e844ec43b55235282 WatchSource:0}: Error finding container 777ec5dee5adcf819d9fd068ecbeb655094e78c04227fd1e844ec43b55235282: Status 404 returned error can't find the container with id 777ec5dee5adcf819d9fd068ecbeb655094e78c04227fd1e844ec43b55235282 Dec 05 01:36:52 crc kubenswrapper[4665]: I1205 01:36:52.612029 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b5dc1946-d77e-4106-9350-326f32a2ae55","Type":"ContainerStarted","Data":"777ec5dee5adcf819d9fd068ecbeb655094e78c04227fd1e844ec43b55235282"} Dec 05 01:36:52 crc kubenswrapper[4665]: I1205 01:36:52.620926 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7a8135c5-ab50-4a2b-895a-7976da8b5bee","Type":"ContainerStarted","Data":"7fec43334eae866a8a38985cd092783de73843a33054cefc71252c7e893a1be2"} Dec 05 01:36:52 crc kubenswrapper[4665]: I1205 01:36:52.907761 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82ad13d8-7710-4135-9822-a96d62650e6d" path="/var/lib/kubelet/pods/82ad13d8-7710-4135-9822-a96d62650e6d/volumes" Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.069592 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-jqpk7"] Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.071495 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.073686 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.083002 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-jqpk7"] Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.232701 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-ovsdbserver-nb\") pod \"dnsmasq-dns-79bd4cc8c9-jqpk7\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.232761 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-dns-svc\") pod \"dnsmasq-dns-79bd4cc8c9-jqpk7\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.232826 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-ovsdbserver-sb\") pod \"dnsmasq-dns-79bd4cc8c9-jqpk7\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.232903 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-dns-swift-storage-0\") pod \"dnsmasq-dns-79bd4cc8c9-jqpk7\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.233059 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-config\") pod \"dnsmasq-dns-79bd4cc8c9-jqpk7\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.233153 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7bx8\" (UniqueName: \"kubernetes.io/projected/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-kube-api-access-z7bx8\") pod \"dnsmasq-dns-79bd4cc8c9-jqpk7\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.233185 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-openstack-edpm-ipam\") pod \"dnsmasq-dns-79bd4cc8c9-jqpk7\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.334964 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-openstack-edpm-ipam\") pod 
\"dnsmasq-dns-79bd4cc8c9-jqpk7\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.335733 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-openstack-edpm-ipam\") pod \"dnsmasq-dns-79bd4cc8c9-jqpk7\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.335896 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-ovsdbserver-nb\") pod \"dnsmasq-dns-79bd4cc8c9-jqpk7\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.335931 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-dns-svc\") pod \"dnsmasq-dns-79bd4cc8c9-jqpk7\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.335960 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-ovsdbserver-sb\") pod \"dnsmasq-dns-79bd4cc8c9-jqpk7\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.336711 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-dns-swift-storage-0\") pod \"dnsmasq-dns-79bd4cc8c9-jqpk7\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.336781 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-config\") pod \"dnsmasq-dns-79bd4cc8c9-jqpk7\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.336826 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-dns-svc\") pod \"dnsmasq-dns-79bd4cc8c9-jqpk7\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.336935 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-ovsdbserver-sb\") pod \"dnsmasq-dns-79bd4cc8c9-jqpk7\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.336646 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-ovsdbserver-nb\") pod \"dnsmasq-dns-79bd4cc8c9-jqpk7\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " 
pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.336834 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7bx8\" (UniqueName: \"kubernetes.io/projected/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-kube-api-access-z7bx8\") pod \"dnsmasq-dns-79bd4cc8c9-jqpk7\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.338200 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-dns-swift-storage-0\") pod \"dnsmasq-dns-79bd4cc8c9-jqpk7\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.338561 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-config\") pod \"dnsmasq-dns-79bd4cc8c9-jqpk7\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.358583 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7bx8\" (UniqueName: \"kubernetes.io/projected/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-kube-api-access-z7bx8\") pod \"dnsmasq-dns-79bd4cc8c9-jqpk7\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.394811 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.632197 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b5dc1946-d77e-4106-9350-326f32a2ae55","Type":"ContainerStarted","Data":"3a3251e408a75165b9d42fa20860e3f724f624c7cb3b83b5f5ad0b0daf2097f7"} Dec 05 01:36:53 crc kubenswrapper[4665]: I1205 01:36:53.948960 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-jqpk7"] Dec 05 01:36:54 crc kubenswrapper[4665]: I1205 01:36:54.010156 4665 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="753728b2-97f7-4b79-8daf-19e01260d537" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.98:5671: i/o timeout" Dec 05 01:36:54 crc kubenswrapper[4665]: I1205 01:36:54.643621 4665 generic.go:334] "Generic (PLEG): container finished" podID="85a5bed1-d9d1-4edd-8c7b-99b209aac6e2" containerID="faa098c0342ecfc64b90d0c1320dd2ef4fb86e8db7d96a92a7156449fb95959c" exitCode=0 Dec 05 01:36:54 crc kubenswrapper[4665]: I1205 01:36:54.643710 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" event={"ID":"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2","Type":"ContainerDied","Data":"faa098c0342ecfc64b90d0c1320dd2ef4fb86e8db7d96a92a7156449fb95959c"} Dec 05 01:36:54 crc kubenswrapper[4665]: I1205 01:36:54.643753 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" event={"ID":"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2","Type":"ContainerStarted","Data":"711b164769c99b838c97ab77f1ef73e09032283553aff345ad4ebcf53d37ac89"} Dec 05 01:36:55 crc kubenswrapper[4665]: I1205 01:36:55.654108 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" event={"ID":"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2","Type":"ContainerStarted","Data":"8454d46e9b87b53735b17c7d088f9d93e538707c78799b7a5de26c70248b2349"} Dec 05 01:36:55 crc kubenswrapper[4665]: I1205 01:36:55.655082 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:36:55 crc kubenswrapper[4665]: I1205 01:36:55.682486 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" podStartSLOduration=2.682467177 podStartE2EDuration="2.682467177s" podCreationTimestamp="2025-12-05 01:36:53 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:36:55.676178244 +0000 UTC m=+1591.015570583" watchObservedRunningTime="2025-12-05 01:36:55.682467177 +0000 UTC m=+1591.021859486" Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.397455 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.494600 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-7bn8l"] Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.495167 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" podUID="a14df5c5-95ad-4881-ab5a-d21c81eb6aad" containerName="dnsmasq-dns" containerID="cri-o://b0503487f0a11712bcfc8edd04e9add1b1ccea571e40ee497cec9e17ccee4bb8" gracePeriod=10 Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.754536 4665 generic.go:334] "Generic (PLEG): container finished" podID="a14df5c5-95ad-4881-ab5a-d21c81eb6aad" containerID="b0503487f0a11712bcfc8edd04e9add1b1ccea571e40ee497cec9e17ccee4bb8" exitCode=0 Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.754579 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" event={"ID":"a14df5c5-95ad-4881-ab5a-d21c81eb6aad","Type":"ContainerDied","Data":"b0503487f0a11712bcfc8edd04e9add1b1ccea571e40ee497cec9e17ccee4bb8"} Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.766461 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-54ffdb7d8c-2jtfc"] Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.774236 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.798696 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-54ffdb7d8c-2jtfc"] Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.850131 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ce335393-a026-4267-b337-ca077b2461b8-dns-svc\") pod \"dnsmasq-dns-54ffdb7d8c-2jtfc\" (UID: \"ce335393-a026-4267-b337-ca077b2461b8\") " pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.850196 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ce335393-a026-4267-b337-ca077b2461b8-ovsdbserver-nb\") pod \"dnsmasq-dns-54ffdb7d8c-2jtfc\" (UID: \"ce335393-a026-4267-b337-ca077b2461b8\") " pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.850236 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce335393-a026-4267-b337-ca077b2461b8-config\") pod \"dnsmasq-dns-54ffdb7d8c-2jtfc\" (UID: \"ce335393-a026-4267-b337-ca077b2461b8\") " pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.850282 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ce335393-a026-4267-b337-ca077b2461b8-openstack-edpm-ipam\") pod \"dnsmasq-dns-54ffdb7d8c-2jtfc\" (UID: \"ce335393-a026-4267-b337-ca077b2461b8\") " pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.850325 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ce335393-a026-4267-b337-ca077b2461b8-dns-swift-storage-0\") pod \"dnsmasq-dns-54ffdb7d8c-2jtfc\" (UID: \"ce335393-a026-4267-b337-ca077b2461b8\") " pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.850351 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ce335393-a026-4267-b337-ca077b2461b8-ovsdbserver-sb\") pod \"dnsmasq-dns-54ffdb7d8c-2jtfc\" (UID: \"ce335393-a026-4267-b337-ca077b2461b8\") " pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.850427 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-85hz5\" (UniqueName: \"kubernetes.io/projected/ce335393-a026-4267-b337-ca077b2461b8-kube-api-access-85hz5\") pod \"dnsmasq-dns-54ffdb7d8c-2jtfc\" (UID: \"ce335393-a026-4267-b337-ca077b2461b8\") " pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.952035 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ce335393-a026-4267-b337-ca077b2461b8-openstack-edpm-ipam\") pod \"dnsmasq-dns-54ffdb7d8c-2jtfc\" (UID: \"ce335393-a026-4267-b337-ca077b2461b8\") " pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 
01:37:03.952084 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ce335393-a026-4267-b337-ca077b2461b8-dns-swift-storage-0\") pod \"dnsmasq-dns-54ffdb7d8c-2jtfc\" (UID: \"ce335393-a026-4267-b337-ca077b2461b8\") " pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.952116 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ce335393-a026-4267-b337-ca077b2461b8-ovsdbserver-sb\") pod \"dnsmasq-dns-54ffdb7d8c-2jtfc\" (UID: \"ce335393-a026-4267-b337-ca077b2461b8\") " pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.952195 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-85hz5\" (UniqueName: \"kubernetes.io/projected/ce335393-a026-4267-b337-ca077b2461b8-kube-api-access-85hz5\") pod \"dnsmasq-dns-54ffdb7d8c-2jtfc\" (UID: \"ce335393-a026-4267-b337-ca077b2461b8\") " pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.952215 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ce335393-a026-4267-b337-ca077b2461b8-dns-svc\") pod \"dnsmasq-dns-54ffdb7d8c-2jtfc\" (UID: \"ce335393-a026-4267-b337-ca077b2461b8\") " pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.952238 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ce335393-a026-4267-b337-ca077b2461b8-ovsdbserver-nb\") pod \"dnsmasq-dns-54ffdb7d8c-2jtfc\" (UID: \"ce335393-a026-4267-b337-ca077b2461b8\") " pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.952272 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce335393-a026-4267-b337-ca077b2461b8-config\") pod \"dnsmasq-dns-54ffdb7d8c-2jtfc\" (UID: \"ce335393-a026-4267-b337-ca077b2461b8\") " pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.953129 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce335393-a026-4267-b337-ca077b2461b8-config\") pod \"dnsmasq-dns-54ffdb7d8c-2jtfc\" (UID: \"ce335393-a026-4267-b337-ca077b2461b8\") " pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.953717 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/ce335393-a026-4267-b337-ca077b2461b8-openstack-edpm-ipam\") pod \"dnsmasq-dns-54ffdb7d8c-2jtfc\" (UID: \"ce335393-a026-4267-b337-ca077b2461b8\") " pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.954399 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/ce335393-a026-4267-b337-ca077b2461b8-ovsdbserver-nb\") pod \"dnsmasq-dns-54ffdb7d8c-2jtfc\" (UID: \"ce335393-a026-4267-b337-ca077b2461b8\") " pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.954471 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/ce335393-a026-4267-b337-ca077b2461b8-dns-swift-storage-0\") pod \"dnsmasq-dns-54ffdb7d8c-2jtfc\" (UID: \"ce335393-a026-4267-b337-ca077b2461b8\") " pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.954514 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/ce335393-a026-4267-b337-ca077b2461b8-dns-svc\") pod \"dnsmasq-dns-54ffdb7d8c-2jtfc\" (UID: \"ce335393-a026-4267-b337-ca077b2461b8\") " pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.954948 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/ce335393-a026-4267-b337-ca077b2461b8-ovsdbserver-sb\") pod \"dnsmasq-dns-54ffdb7d8c-2jtfc\" (UID: \"ce335393-a026-4267-b337-ca077b2461b8\") " pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" Dec 05 01:37:03 crc kubenswrapper[4665]: I1205 01:37:03.988076 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-85hz5\" (UniqueName: \"kubernetes.io/projected/ce335393-a026-4267-b337-ca077b2461b8-kube-api-access-85hz5\") pod \"dnsmasq-dns-54ffdb7d8c-2jtfc\" (UID: \"ce335393-a026-4267-b337-ca077b2461b8\") " pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" Dec 05 01:37:04 crc kubenswrapper[4665]: I1205 01:37:04.102349 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:04.681642 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:04.763611 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" event={"ID":"a14df5c5-95ad-4881-ab5a-d21c81eb6aad","Type":"ContainerDied","Data":"7e13d17c8b617ae6c0c9f68a12d1516a54ffd725178b9f5bbe7567b61a257459"} Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:04.763653 4665 scope.go:117] "RemoveContainer" containerID="b0503487f0a11712bcfc8edd04e9add1b1ccea571e40ee497cec9e17ccee4bb8" Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:04.763766 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-89c5cd4d5-7bn8l" Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:04.766344 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-config\") pod \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\" (UID: \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\") " Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:04.766520 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-ovsdbserver-nb\") pod \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\" (UID: \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\") " Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:04.766548 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dkfjq\" (UniqueName: \"kubernetes.io/projected/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-kube-api-access-dkfjq\") pod \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\" (UID: \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\") " Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:04.766588 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-dns-svc\") pod \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\" (UID: \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\") " Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:04.766622 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-ovsdbserver-sb\") pod \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\" (UID: \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\") " Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:04.766637 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-dns-swift-storage-0\") pod \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\" (UID: \"a14df5c5-95ad-4881-ab5a-d21c81eb6aad\") " Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:04.788045 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-kube-api-access-dkfjq" (OuterVolumeSpecName: "kube-api-access-dkfjq") pod "a14df5c5-95ad-4881-ab5a-d21c81eb6aad" (UID: "a14df5c5-95ad-4881-ab5a-d21c81eb6aad"). InnerVolumeSpecName "kube-api-access-dkfjq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:04.788962 4665 scope.go:117] "RemoveContainer" containerID="5d6e3975e2811d6512f26236679f4aea38bd803abf815fff70d50e20470c18a3" Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:04.839919 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a14df5c5-95ad-4881-ab5a-d21c81eb6aad" (UID: "a14df5c5-95ad-4881-ab5a-d21c81eb6aad"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:04.857080 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-config" (OuterVolumeSpecName: "config") pod "a14df5c5-95ad-4881-ab5a-d21c81eb6aad" (UID: "a14df5c5-95ad-4881-ab5a-d21c81eb6aad"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:04.871802 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:04.871828 4665 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:04.871838 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dkfjq\" (UniqueName: \"kubernetes.io/projected/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-kube-api-access-dkfjq\") on node \"crc\" DevicePath \"\"" Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:04.875467 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a14df5c5-95ad-4881-ab5a-d21c81eb6aad" (UID: "a14df5c5-95ad-4881-ab5a-d21c81eb6aad"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:04.881699 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a14df5c5-95ad-4881-ab5a-d21c81eb6aad" (UID: "a14df5c5-95ad-4881-ab5a-d21c81eb6aad"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:04.884623 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a14df5c5-95ad-4881-ab5a-d21c81eb6aad" (UID: "a14df5c5-95ad-4881-ab5a-d21c81eb6aad"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:04.973491 4665 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:04.973516 4665 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:04.973527 4665 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a14df5c5-95ad-4881-ab5a-d21c81eb6aad-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:05.090340 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-7bn8l"] Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:05.098412 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-89c5cd4d5-7bn8l"] Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:05.349364 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-54ffdb7d8c-2jtfc"] Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:05.773626 4665 generic.go:334] "Generic (PLEG): container finished" podID="ce335393-a026-4267-b337-ca077b2461b8" containerID="7bc0d213717654a93d2a42f7c37c9a6258270acd2451ce3995e79881e2e1b7d9" exitCode=0 Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:05.773826 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" event={"ID":"ce335393-a026-4267-b337-ca077b2461b8","Type":"ContainerDied","Data":"7bc0d213717654a93d2a42f7c37c9a6258270acd2451ce3995e79881e2e1b7d9"} Dec 05 01:37:05 crc kubenswrapper[4665]: I1205 01:37:05.773928 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" event={"ID":"ce335393-a026-4267-b337-ca077b2461b8","Type":"ContainerStarted","Data":"920a918bcf19824e32dbdc7f1a86199c43634216f32b37c2cb73f3c8296cb1b5"} Dec 05 01:37:06 crc kubenswrapper[4665]: I1205 01:37:06.787274 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" event={"ID":"ce335393-a026-4267-b337-ca077b2461b8","Type":"ContainerStarted","Data":"8286a76b6d45bc7bb194e597da70ac0c16660ed1593e246bc68b636a4b213a9c"} Dec 05 01:37:06 crc kubenswrapper[4665]: I1205 01:37:06.788479 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" Dec 05 01:37:06 crc kubenswrapper[4665]: I1205 01:37:06.808224 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" podStartSLOduration=3.808203399 podStartE2EDuration="3.808203399s" podCreationTimestamp="2025-12-05 01:37:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:37:06.806109448 +0000 UTC m=+1602.145501747" watchObservedRunningTime="2025-12-05 01:37:06.808203399 +0000 UTC m=+1602.147595698" Dec 05 01:37:06 crc kubenswrapper[4665]: I1205 01:37:06.903722 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a14df5c5-95ad-4881-ab5a-d21c81eb6aad" path="/var/lib/kubelet/pods/a14df5c5-95ad-4881-ab5a-d21c81eb6aad/volumes" Dec 05 
01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.104349 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-54ffdb7d8c-2jtfc" Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.190878 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-jqpk7"] Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.191402 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" podUID="85a5bed1-d9d1-4edd-8c7b-99b209aac6e2" containerName="dnsmasq-dns" containerID="cri-o://8454d46e9b87b53735b17c7d088f9d93e538707c78799b7a5de26c70248b2349" gracePeriod=10 Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.745084 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.790051 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z7bx8\" (UniqueName: \"kubernetes.io/projected/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-kube-api-access-z7bx8\") pod \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.790165 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-openstack-edpm-ipam\") pod \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.790306 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-ovsdbserver-sb\") pod \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.790342 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-dns-svc\") pod \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.790376 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-dns-swift-storage-0\") pod \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.790428 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-ovsdbserver-nb\") pod \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.790455 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-config\") pod \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.824922 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-kube-api-access-z7bx8" (OuterVolumeSpecName: "kube-api-access-z7bx8") pod "85a5bed1-d9d1-4edd-8c7b-99b209aac6e2" (UID: "85a5bed1-d9d1-4edd-8c7b-99b209aac6e2"). InnerVolumeSpecName "kube-api-access-z7bx8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.864243 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "85a5bed1-d9d1-4edd-8c7b-99b209aac6e2" (UID: "85a5bed1-d9d1-4edd-8c7b-99b209aac6e2"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.874915 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "85a5bed1-d9d1-4edd-8c7b-99b209aac6e2" (UID: "85a5bed1-d9d1-4edd-8c7b-99b209aac6e2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.890141 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "85a5bed1-d9d1-4edd-8c7b-99b209aac6e2" (UID: "85a5bed1-d9d1-4edd-8c7b-99b209aac6e2"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.895869 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "85a5bed1-d9d1-4edd-8c7b-99b209aac6e2" (UID: "85a5bed1-d9d1-4edd-8c7b-99b209aac6e2"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.896881 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-ovsdbserver-nb\") pod \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\" (UID: \"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2\") " Dec 05 01:37:14 crc kubenswrapper[4665]: W1205 01:37:14.897163 4665 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2/volumes/kubernetes.io~configmap/ovsdbserver-nb Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.897181 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "85a5bed1-d9d1-4edd-8c7b-99b209aac6e2" (UID: "85a5bed1-d9d1-4edd-8c7b-99b209aac6e2"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.897623 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7bx8\" (UniqueName: \"kubernetes.io/projected/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-kube-api-access-z7bx8\") on node \"crc\" DevicePath \"\"" Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.897647 4665 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.897660 4665 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.897674 4665 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.897685 4665 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.900615 4665 generic.go:334] "Generic (PLEG): container finished" podID="85a5bed1-d9d1-4edd-8c7b-99b209aac6e2" containerID="8454d46e9b87b53735b17c7d088f9d93e538707c78799b7a5de26c70248b2349" exitCode=0 Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.900985 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "85a5bed1-d9d1-4edd-8c7b-99b209aac6e2" (UID: "85a5bed1-d9d1-4edd-8c7b-99b209aac6e2"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.905884 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.915312 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" event={"ID":"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2","Type":"ContainerDied","Data":"8454d46e9b87b53735b17c7d088f9d93e538707c78799b7a5de26c70248b2349"} Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.915366 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-79bd4cc8c9-jqpk7" event={"ID":"85a5bed1-d9d1-4edd-8c7b-99b209aac6e2","Type":"ContainerDied","Data":"711b164769c99b838c97ab77f1ef73e09032283553aff345ad4ebcf53d37ac89"} Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.915387 4665 scope.go:117] "RemoveContainer" containerID="8454d46e9b87b53735b17c7d088f9d93e538707c78799b7a5de26c70248b2349" Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.922510 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.922567 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.938757 4665 scope.go:117] "RemoveContainer" containerID="faa098c0342ecfc64b90d0c1320dd2ef4fb86e8db7d96a92a7156449fb95959c" Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.950524 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-config" (OuterVolumeSpecName: "config") pod "85a5bed1-d9d1-4edd-8c7b-99b209aac6e2" (UID: "85a5bed1-d9d1-4edd-8c7b-99b209aac6e2"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.957790 4665 scope.go:117] "RemoveContainer" containerID="8454d46e9b87b53735b17c7d088f9d93e538707c78799b7a5de26c70248b2349" Dec 05 01:37:14 crc kubenswrapper[4665]: E1205 01:37:14.958121 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8454d46e9b87b53735b17c7d088f9d93e538707c78799b7a5de26c70248b2349\": container with ID starting with 8454d46e9b87b53735b17c7d088f9d93e538707c78799b7a5de26c70248b2349 not found: ID does not exist" containerID="8454d46e9b87b53735b17c7d088f9d93e538707c78799b7a5de26c70248b2349" Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.958161 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8454d46e9b87b53735b17c7d088f9d93e538707c78799b7a5de26c70248b2349"} err="failed to get container status \"8454d46e9b87b53735b17c7d088f9d93e538707c78799b7a5de26c70248b2349\": rpc error: code = NotFound desc = could not find container \"8454d46e9b87b53735b17c7d088f9d93e538707c78799b7a5de26c70248b2349\": container with ID starting with 8454d46e9b87b53735b17c7d088f9d93e538707c78799b7a5de26c70248b2349 not found: ID does not exist" Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.958184 4665 scope.go:117] "RemoveContainer" containerID="faa098c0342ecfc64b90d0c1320dd2ef4fb86e8db7d96a92a7156449fb95959c" Dec 05 01:37:14 crc kubenswrapper[4665]: E1205 01:37:14.958456 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"faa098c0342ecfc64b90d0c1320dd2ef4fb86e8db7d96a92a7156449fb95959c\": container with ID starting with faa098c0342ecfc64b90d0c1320dd2ef4fb86e8db7d96a92a7156449fb95959c not found: ID does not exist" containerID="faa098c0342ecfc64b90d0c1320dd2ef4fb86e8db7d96a92a7156449fb95959c" Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.958479 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"faa098c0342ecfc64b90d0c1320dd2ef4fb86e8db7d96a92a7156449fb95959c"} err="failed to get container status \"faa098c0342ecfc64b90d0c1320dd2ef4fb86e8db7d96a92a7156449fb95959c\": rpc error: code = NotFound desc = could not find container \"faa098c0342ecfc64b90d0c1320dd2ef4fb86e8db7d96a92a7156449fb95959c\": container with ID starting with faa098c0342ecfc64b90d0c1320dd2ef4fb86e8db7d96a92a7156449fb95959c not found: ID does not exist" Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.998795 4665 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 01:37:14 crc kubenswrapper[4665]: I1205 01:37:14.998826 4665 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2-config\") on node \"crc\" DevicePath \"\"" Dec 05 01:37:15 crc kubenswrapper[4665]: I1205 01:37:15.248820 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-jqpk7"] Dec 05 01:37:15 crc kubenswrapper[4665]: I1205 01:37:15.258492 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-79bd4cc8c9-jqpk7"] Dec 05 01:37:16 crc kubenswrapper[4665]: I1205 01:37:16.905531 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85a5bed1-d9d1-4edd-8c7b-99b209aac6e2" 
path="/var/lib/kubelet/pods/85a5bed1-d9d1-4edd-8c7b-99b209aac6e2/volumes" Dec 05 01:37:24 crc kubenswrapper[4665]: I1205 01:37:24.995133 4665 generic.go:334] "Generic (PLEG): container finished" podID="7a8135c5-ab50-4a2b-895a-7976da8b5bee" containerID="7fec43334eae866a8a38985cd092783de73843a33054cefc71252c7e893a1be2" exitCode=0 Dec 05 01:37:24 crc kubenswrapper[4665]: I1205 01:37:24.995180 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7a8135c5-ab50-4a2b-895a-7976da8b5bee","Type":"ContainerDied","Data":"7fec43334eae866a8a38985cd092783de73843a33054cefc71252c7e893a1be2"} Dec 05 01:37:25 crc kubenswrapper[4665]: E1205 01:37:25.731204 4665 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb5dc1946_d77e_4106_9350_326f32a2ae55.slice/crio-3a3251e408a75165b9d42fa20860e3f724f624c7cb3b83b5f5ad0b0daf2097f7.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb5dc1946_d77e_4106_9350_326f32a2ae55.slice/crio-conmon-3a3251e408a75165b9d42fa20860e3f724f624c7cb3b83b5f5ad0b0daf2097f7.scope\": RecentStats: unable to find data in memory cache]" Dec 05 01:37:26 crc kubenswrapper[4665]: I1205 01:37:26.005730 4665 generic.go:334] "Generic (PLEG): container finished" podID="b5dc1946-d77e-4106-9350-326f32a2ae55" containerID="3a3251e408a75165b9d42fa20860e3f724f624c7cb3b83b5f5ad0b0daf2097f7" exitCode=0 Dec 05 01:37:26 crc kubenswrapper[4665]: I1205 01:37:26.005774 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b5dc1946-d77e-4106-9350-326f32a2ae55","Type":"ContainerDied","Data":"3a3251e408a75165b9d42fa20860e3f724f624c7cb3b83b5f5ad0b0daf2097f7"} Dec 05 01:37:26 crc kubenswrapper[4665]: I1205 01:37:26.010379 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"7a8135c5-ab50-4a2b-895a-7976da8b5bee","Type":"ContainerStarted","Data":"b5d4d445bf2f534a5909236dcfa63c150755049fac797aac7b4c691d2eeb0ee4"} Dec 05 01:37:26 crc kubenswrapper[4665]: I1205 01:37:26.011037 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 05 01:37:26 crc kubenswrapper[4665]: I1205 01:37:26.102803 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=37.10278543 podStartE2EDuration="37.10278543s" podCreationTimestamp="2025-12-05 01:36:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:37:26.063227848 +0000 UTC m=+1621.402620157" watchObservedRunningTime="2025-12-05 01:37:26.10278543 +0000 UTC m=+1621.442177729" Dec 05 01:37:27 crc kubenswrapper[4665]: I1205 01:37:27.021744 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"b5dc1946-d77e-4106-9350-326f32a2ae55","Type":"ContainerStarted","Data":"841a4cc228d1cf7eca3e3d84f1cafb05d83e141f4873e8f21264ffc180b16de7"} Dec 05 01:37:27 crc kubenswrapper[4665]: I1205 01:37:27.022321 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:37:27 crc kubenswrapper[4665]: I1205 01:37:27.042880 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" 
podStartSLOduration=37.042865301 podStartE2EDuration="37.042865301s" podCreationTimestamp="2025-12-05 01:36:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:37:27.04160501 +0000 UTC m=+1622.380997329" watchObservedRunningTime="2025-12-05 01:37:27.042865301 +0000 UTC m=+1622.382257600" Dec 05 01:37:31 crc kubenswrapper[4665]: I1205 01:37:31.452506 4665 scope.go:117] "RemoveContainer" containerID="a9ff8bdcd3b3ecf7b32a9a7e0ec7c0f8f50930d24164db0958d82ef6e7f35912" Dec 05 01:37:31 crc kubenswrapper[4665]: I1205 01:37:31.480444 4665 scope.go:117] "RemoveContainer" containerID="f3984be52016e332c575d67ff002b615eb5fd0d73d4bd66fb4f0c371e26877e7" Dec 05 01:37:31 crc kubenswrapper[4665]: I1205 01:37:31.511734 4665 scope.go:117] "RemoveContainer" containerID="b014d93b6d1ffc0abbec7266cdb1929e882757c6c87d997025d3c287a9fdd928" Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.251375 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2"] Dec 05 01:37:37 crc kubenswrapper[4665]: E1205 01:37:37.253595 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a14df5c5-95ad-4881-ab5a-d21c81eb6aad" containerName="dnsmasq-dns" Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.253685 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="a14df5c5-95ad-4881-ab5a-d21c81eb6aad" containerName="dnsmasq-dns" Dec 05 01:37:37 crc kubenswrapper[4665]: E1205 01:37:37.253780 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a14df5c5-95ad-4881-ab5a-d21c81eb6aad" containerName="init" Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.253848 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="a14df5c5-95ad-4881-ab5a-d21c81eb6aad" containerName="init" Dec 05 01:37:37 crc kubenswrapper[4665]: E1205 01:37:37.253920 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85a5bed1-d9d1-4edd-8c7b-99b209aac6e2" containerName="init" Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.253983 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="85a5bed1-d9d1-4edd-8c7b-99b209aac6e2" containerName="init" Dec 05 01:37:37 crc kubenswrapper[4665]: E1205 01:37:37.254065 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85a5bed1-d9d1-4edd-8c7b-99b209aac6e2" containerName="dnsmasq-dns" Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.254131 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="85a5bed1-d9d1-4edd-8c7b-99b209aac6e2" containerName="dnsmasq-dns" Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.254459 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="a14df5c5-95ad-4881-ab5a-d21c81eb6aad" containerName="dnsmasq-dns" Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.254560 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="85a5bed1-d9d1-4edd-8c7b-99b209aac6e2" containerName="dnsmasq-dns" Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.255438 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2" Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.257627 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2"] Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.259871 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.261716 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r7jw8" Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.261789 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.264805 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.292950 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f3777f3d-f5e6-479d-947b-baf234749487-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2\" (UID: \"f3777f3d-f5e6-479d-947b-baf234749487\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2" Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.293159 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6655\" (UniqueName: \"kubernetes.io/projected/f3777f3d-f5e6-479d-947b-baf234749487-kube-api-access-k6655\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2\" (UID: \"f3777f3d-f5e6-479d-947b-baf234749487\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2" Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.293243 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3777f3d-f5e6-479d-947b-baf234749487-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2\" (UID: \"f3777f3d-f5e6-479d-947b-baf234749487\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2" Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.293311 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f3777f3d-f5e6-479d-947b-baf234749487-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2\" (UID: \"f3777f3d-f5e6-479d-947b-baf234749487\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2" Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.395353 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f3777f3d-f5e6-479d-947b-baf234749487-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2\" (UID: \"f3777f3d-f5e6-479d-947b-baf234749487\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2" Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.395440 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k6655\" (UniqueName: \"kubernetes.io/projected/f3777f3d-f5e6-479d-947b-baf234749487-kube-api-access-k6655\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2\" (UID: \"f3777f3d-f5e6-479d-947b-baf234749487\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2" Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.395471 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3777f3d-f5e6-479d-947b-baf234749487-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2\" (UID: \"f3777f3d-f5e6-479d-947b-baf234749487\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2" Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.395495 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f3777f3d-f5e6-479d-947b-baf234749487-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2\" (UID: \"f3777f3d-f5e6-479d-947b-baf234749487\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2" Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.403346 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f3777f3d-f5e6-479d-947b-baf234749487-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2\" (UID: \"f3777f3d-f5e6-479d-947b-baf234749487\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2" Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.404881 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3777f3d-f5e6-479d-947b-baf234749487-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2\" (UID: \"f3777f3d-f5e6-479d-947b-baf234749487\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2" Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.405917 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f3777f3d-f5e6-479d-947b-baf234749487-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2\" (UID: \"f3777f3d-f5e6-479d-947b-baf234749487\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2" Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.414621 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6655\" (UniqueName: \"kubernetes.io/projected/f3777f3d-f5e6-479d-947b-baf234749487-kube-api-access-k6655\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2\" (UID: \"f3777f3d-f5e6-479d-947b-baf234749487\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2" Dec 05 01:37:37 crc kubenswrapper[4665]: I1205 01:37:37.607275 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2" Dec 05 01:37:39 crc kubenswrapper[4665]: I1205 01:37:39.070064 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2"] Dec 05 01:37:39 crc kubenswrapper[4665]: W1205 01:37:39.070286 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf3777f3d_f5e6_479d_947b_baf234749487.slice/crio-9c9ad5ea1fcd93857609fb657c44f98f5a57ad3900f89a24aa156af04b3e9c5b WatchSource:0}: Error finding container 9c9ad5ea1fcd93857609fb657c44f98f5a57ad3900f89a24aa156af04b3e9c5b: Status 404 returned error can't find the container with id 9c9ad5ea1fcd93857609fb657c44f98f5a57ad3900f89a24aa156af04b3e9c5b Dec 05 01:37:39 crc kubenswrapper[4665]: I1205 01:37:39.130582 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2" event={"ID":"f3777f3d-f5e6-479d-947b-baf234749487","Type":"ContainerStarted","Data":"9c9ad5ea1fcd93857609fb657c44f98f5a57ad3900f89a24aa156af04b3e9c5b"} Dec 05 01:37:39 crc kubenswrapper[4665]: I1205 01:37:39.997455 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 05 01:37:41 crc kubenswrapper[4665]: I1205 01:37:41.251804 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 05 01:37:44 crc kubenswrapper[4665]: I1205 01:37:44.923402 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:37:44 crc kubenswrapper[4665]: I1205 01:37:44.924046 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:37:54 crc kubenswrapper[4665]: I1205 01:37:54.297208 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2" event={"ID":"f3777f3d-f5e6-479d-947b-baf234749487","Type":"ContainerStarted","Data":"a40ea3ed6aac59c11ba6c9a00d3b3903721bdc8ea35b4361bbb70b7383beb7d1"} Dec 05 01:37:54 crc kubenswrapper[4665]: I1205 01:37:54.315186 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2" podStartSLOduration=2.530918846 podStartE2EDuration="17.315167185s" podCreationTimestamp="2025-12-05 01:37:37 +0000 UTC" firstStartedPulling="2025-12-05 01:37:39.072832371 +0000 UTC m=+1634.412224670" lastFinishedPulling="2025-12-05 01:37:53.85708071 +0000 UTC m=+1649.196473009" observedRunningTime="2025-12-05 01:37:54.314028117 +0000 UTC m=+1649.653420426" watchObservedRunningTime="2025-12-05 01:37:54.315167185 +0000 UTC m=+1649.654559484" Dec 05 01:38:06 crc kubenswrapper[4665]: I1205 01:38:06.399502 4665 generic.go:334] "Generic (PLEG): container finished" podID="f3777f3d-f5e6-479d-947b-baf234749487" containerID="a40ea3ed6aac59c11ba6c9a00d3b3903721bdc8ea35b4361bbb70b7383beb7d1" exitCode=0 Dec 05 01:38:06 crc kubenswrapper[4665]: I1205 
01:38:06.399694 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2" event={"ID":"f3777f3d-f5e6-479d-947b-baf234749487","Type":"ContainerDied","Data":"a40ea3ed6aac59c11ba6c9a00d3b3903721bdc8ea35b4361bbb70b7383beb7d1"} Dec 05 01:38:07 crc kubenswrapper[4665]: I1205 01:38:07.839052 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2" Dec 05 01:38:07 crc kubenswrapper[4665]: I1205 01:38:07.976228 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3777f3d-f5e6-479d-947b-baf234749487-repo-setup-combined-ca-bundle\") pod \"f3777f3d-f5e6-479d-947b-baf234749487\" (UID: \"f3777f3d-f5e6-479d-947b-baf234749487\") " Dec 05 01:38:07 crc kubenswrapper[4665]: I1205 01:38:07.976376 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f3777f3d-f5e6-479d-947b-baf234749487-ssh-key\") pod \"f3777f3d-f5e6-479d-947b-baf234749487\" (UID: \"f3777f3d-f5e6-479d-947b-baf234749487\") " Dec 05 01:38:07 crc kubenswrapper[4665]: I1205 01:38:07.976509 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f3777f3d-f5e6-479d-947b-baf234749487-inventory\") pod \"f3777f3d-f5e6-479d-947b-baf234749487\" (UID: \"f3777f3d-f5e6-479d-947b-baf234749487\") " Dec 05 01:38:07 crc kubenswrapper[4665]: I1205 01:38:07.976547 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k6655\" (UniqueName: \"kubernetes.io/projected/f3777f3d-f5e6-479d-947b-baf234749487-kube-api-access-k6655\") pod \"f3777f3d-f5e6-479d-947b-baf234749487\" (UID: \"f3777f3d-f5e6-479d-947b-baf234749487\") " Dec 05 01:38:07 crc kubenswrapper[4665]: I1205 01:38:07.985541 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3777f3d-f5e6-479d-947b-baf234749487-kube-api-access-k6655" (OuterVolumeSpecName: "kube-api-access-k6655") pod "f3777f3d-f5e6-479d-947b-baf234749487" (UID: "f3777f3d-f5e6-479d-947b-baf234749487"). InnerVolumeSpecName "kube-api-access-k6655". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.003375 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3777f3d-f5e6-479d-947b-baf234749487-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "f3777f3d-f5e6-479d-947b-baf234749487" (UID: "f3777f3d-f5e6-479d-947b-baf234749487"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.007893 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3777f3d-f5e6-479d-947b-baf234749487-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f3777f3d-f5e6-479d-947b-baf234749487" (UID: "f3777f3d-f5e6-479d-947b-baf234749487"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.013561 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f3777f3d-f5e6-479d-947b-baf234749487-inventory" (OuterVolumeSpecName: "inventory") pod "f3777f3d-f5e6-479d-947b-baf234749487" (UID: "f3777f3d-f5e6-479d-947b-baf234749487"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.079489 4665 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f3777f3d-f5e6-479d-947b-baf234749487-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.079849 4665 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f3777f3d-f5e6-479d-947b-baf234749487-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.079862 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k6655\" (UniqueName: \"kubernetes.io/projected/f3777f3d-f5e6-479d-947b-baf234749487-kube-api-access-k6655\") on node \"crc\" DevicePath \"\"" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.079872 4665 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/f3777f3d-f5e6-479d-947b-baf234749487-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.418869 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2" event={"ID":"f3777f3d-f5e6-479d-947b-baf234749487","Type":"ContainerDied","Data":"9c9ad5ea1fcd93857609fb657c44f98f5a57ad3900f89a24aa156af04b3e9c5b"} Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.418906 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9c9ad5ea1fcd93857609fb657c44f98f5a57ad3900f89a24aa156af04b3e9c5b" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.418943 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.514419 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-4wjpm"] Dec 05 01:38:08 crc kubenswrapper[4665]: E1205 01:38:08.515454 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3777f3d-f5e6-479d-947b-baf234749487" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.515476 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3777f3d-f5e6-479d-947b-baf234749487" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.515701 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3777f3d-f5e6-479d-947b-baf234749487" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.516719 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4wjpm" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.525753 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.525831 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r7jw8" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.525987 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.526286 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.560272 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-4wjpm"] Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.689937 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qftdk\" (UniqueName: \"kubernetes.io/projected/61298cb7-8b67-4f94-bd96-ee4ec8189d00-kube-api-access-qftdk\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-4wjpm\" (UID: \"61298cb7-8b67-4f94-bd96-ee4ec8189d00\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4wjpm" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.690338 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61298cb7-8b67-4f94-bd96-ee4ec8189d00-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-4wjpm\" (UID: \"61298cb7-8b67-4f94-bd96-ee4ec8189d00\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4wjpm" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.690668 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61298cb7-8b67-4f94-bd96-ee4ec8189d00-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-4wjpm\" (UID: \"61298cb7-8b67-4f94-bd96-ee4ec8189d00\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4wjpm" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.792242 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61298cb7-8b67-4f94-bd96-ee4ec8189d00-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-4wjpm\" (UID: \"61298cb7-8b67-4f94-bd96-ee4ec8189d00\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4wjpm" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.792393 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qftdk\" (UniqueName: \"kubernetes.io/projected/61298cb7-8b67-4f94-bd96-ee4ec8189d00-kube-api-access-qftdk\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-4wjpm\" (UID: \"61298cb7-8b67-4f94-bd96-ee4ec8189d00\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4wjpm" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.792449 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61298cb7-8b67-4f94-bd96-ee4ec8189d00-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-4wjpm\" (UID: \"61298cb7-8b67-4f94-bd96-ee4ec8189d00\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4wjpm" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.798838 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61298cb7-8b67-4f94-bd96-ee4ec8189d00-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-4wjpm\" (UID: \"61298cb7-8b67-4f94-bd96-ee4ec8189d00\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4wjpm" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.803890 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61298cb7-8b67-4f94-bd96-ee4ec8189d00-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-4wjpm\" (UID: \"61298cb7-8b67-4f94-bd96-ee4ec8189d00\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4wjpm" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.812532 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qftdk\" (UniqueName: \"kubernetes.io/projected/61298cb7-8b67-4f94-bd96-ee4ec8189d00-kube-api-access-qftdk\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-4wjpm\" (UID: \"61298cb7-8b67-4f94-bd96-ee4ec8189d00\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4wjpm" Dec 05 01:38:08 crc kubenswrapper[4665]: I1205 01:38:08.843189 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4wjpm" Dec 05 01:38:09 crc kubenswrapper[4665]: I1205 01:38:09.386681 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-4wjpm"] Dec 05 01:38:09 crc kubenswrapper[4665]: I1205 01:38:09.441979 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4wjpm" event={"ID":"61298cb7-8b67-4f94-bd96-ee4ec8189d00","Type":"ContainerStarted","Data":"8d577091ebf010daef77a068d95b1ce21bea5b7b5bf450629baf0ac17369e60a"} Dec 05 01:38:10 crc kubenswrapper[4665]: I1205 01:38:10.452226 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4wjpm" event={"ID":"61298cb7-8b67-4f94-bd96-ee4ec8189d00","Type":"ContainerStarted","Data":"77ede3ccd2c89b583f7c2e63bc6f1bbe95619f16393865ca8447857cc46e219b"} Dec 05 01:38:10 crc kubenswrapper[4665]: I1205 01:38:10.480416 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4wjpm" podStartSLOduration=2.263149329 podStartE2EDuration="2.48039491s" podCreationTimestamp="2025-12-05 01:38:08 +0000 UTC" firstStartedPulling="2025-12-05 01:38:09.411370586 +0000 UTC m=+1664.750762895" lastFinishedPulling="2025-12-05 01:38:09.628616177 +0000 UTC m=+1664.968008476" observedRunningTime="2025-12-05 01:38:10.479457348 +0000 UTC m=+1665.818849657" watchObservedRunningTime="2025-12-05 01:38:10.48039491 +0000 UTC m=+1665.819787209" Dec 05 01:38:13 crc kubenswrapper[4665]: I1205 01:38:13.478074 4665 generic.go:334] "Generic (PLEG): container finished" podID="61298cb7-8b67-4f94-bd96-ee4ec8189d00" containerID="77ede3ccd2c89b583f7c2e63bc6f1bbe95619f16393865ca8447857cc46e219b" exitCode=0 Dec 05 01:38:13 crc kubenswrapper[4665]: I1205 01:38:13.478159 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4wjpm" 
event={"ID":"61298cb7-8b67-4f94-bd96-ee4ec8189d00","Type":"ContainerDied","Data":"77ede3ccd2c89b583f7c2e63bc6f1bbe95619f16393865ca8447857cc46e219b"} Dec 05 01:38:14 crc kubenswrapper[4665]: I1205 01:38:14.863520 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4wjpm" Dec 05 01:38:14 crc kubenswrapper[4665]: I1205 01:38:14.921952 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:38:14 crc kubenswrapper[4665]: I1205 01:38:14.922012 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:38:14 crc kubenswrapper[4665]: I1205 01:38:14.922064 4665 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 01:38:14 crc kubenswrapper[4665]: I1205 01:38:14.922863 4665 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25"} pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 01:38:14 crc kubenswrapper[4665]: I1205 01:38:14.922927 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" containerID="cri-o://1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" gracePeriod=600 Dec 05 01:38:14 crc kubenswrapper[4665]: I1205 01:38:14.933833 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qftdk\" (UniqueName: \"kubernetes.io/projected/61298cb7-8b67-4f94-bd96-ee4ec8189d00-kube-api-access-qftdk\") pod \"61298cb7-8b67-4f94-bd96-ee4ec8189d00\" (UID: \"61298cb7-8b67-4f94-bd96-ee4ec8189d00\") " Dec 05 01:38:14 crc kubenswrapper[4665]: I1205 01:38:14.934028 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61298cb7-8b67-4f94-bd96-ee4ec8189d00-ssh-key\") pod \"61298cb7-8b67-4f94-bd96-ee4ec8189d00\" (UID: \"61298cb7-8b67-4f94-bd96-ee4ec8189d00\") " Dec 05 01:38:14 crc kubenswrapper[4665]: I1205 01:38:14.934178 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61298cb7-8b67-4f94-bd96-ee4ec8189d00-inventory\") pod \"61298cb7-8b67-4f94-bd96-ee4ec8189d00\" (UID: \"61298cb7-8b67-4f94-bd96-ee4ec8189d00\") " Dec 05 01:38:14 crc kubenswrapper[4665]: I1205 01:38:14.942558 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61298cb7-8b67-4f94-bd96-ee4ec8189d00-kube-api-access-qftdk" (OuterVolumeSpecName: "kube-api-access-qftdk") pod "61298cb7-8b67-4f94-bd96-ee4ec8189d00" (UID: "61298cb7-8b67-4f94-bd96-ee4ec8189d00"). 
InnerVolumeSpecName "kube-api-access-qftdk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:38:14 crc kubenswrapper[4665]: I1205 01:38:14.971105 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61298cb7-8b67-4f94-bd96-ee4ec8189d00-inventory" (OuterVolumeSpecName: "inventory") pod "61298cb7-8b67-4f94-bd96-ee4ec8189d00" (UID: "61298cb7-8b67-4f94-bd96-ee4ec8189d00"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:38:14 crc kubenswrapper[4665]: I1205 01:38:14.978534 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61298cb7-8b67-4f94-bd96-ee4ec8189d00-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "61298cb7-8b67-4f94-bd96-ee4ec8189d00" (UID: "61298cb7-8b67-4f94-bd96-ee4ec8189d00"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.037408 4665 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61298cb7-8b67-4f94-bd96-ee4ec8189d00-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.037467 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qftdk\" (UniqueName: \"kubernetes.io/projected/61298cb7-8b67-4f94-bd96-ee4ec8189d00-kube-api-access-qftdk\") on node \"crc\" DevicePath \"\"" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.037486 4665 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61298cb7-8b67-4f94-bd96-ee4ec8189d00-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 01:38:15 crc kubenswrapper[4665]: E1205 01:38:15.044202 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.499344 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4wjpm" event={"ID":"61298cb7-8b67-4f94-bd96-ee4ec8189d00","Type":"ContainerDied","Data":"8d577091ebf010daef77a068d95b1ce21bea5b7b5bf450629baf0ac17369e60a"} Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.500047 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8d577091ebf010daef77a068d95b1ce21bea5b7b5bf450629baf0ac17369e60a" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.499357 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-4wjpm" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.502357 4665 generic.go:334] "Generic (PLEG): container finished" podID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" exitCode=0 Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.502421 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerDied","Data":"1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25"} Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.502472 4665 scope.go:117] "RemoveContainer" containerID="8528e05d6539c1b4845305ab27b265834c7200bd6a2bd4006fa1a98598856bbe" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.503584 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:38:15 crc kubenswrapper[4665]: E1205 01:38:15.503959 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.654798 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg"] Dec 05 01:38:15 crc kubenswrapper[4665]: E1205 01:38:15.655208 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61298cb7-8b67-4f94-bd96-ee4ec8189d00" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.655227 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="61298cb7-8b67-4f94-bd96-ee4ec8189d00" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.655465 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="61298cb7-8b67-4f94-bd96-ee4ec8189d00" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.656064 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.660070 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.660100 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.660268 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.660820 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r7jw8" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.676900 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg"] Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.759774 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dd675614-41e7-40e1-b09d-639e6ed250fb-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg\" (UID: \"dd675614-41e7-40e1-b09d-639e6ed250fb\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.759987 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dd675614-41e7-40e1-b09d-639e6ed250fb-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg\" (UID: \"dd675614-41e7-40e1-b09d-639e6ed250fb\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.760045 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd675614-41e7-40e1-b09d-639e6ed250fb-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg\" (UID: \"dd675614-41e7-40e1-b09d-639e6ed250fb\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.760180 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rfk6w\" (UniqueName: \"kubernetes.io/projected/dd675614-41e7-40e1-b09d-639e6ed250fb-kube-api-access-rfk6w\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg\" (UID: \"dd675614-41e7-40e1-b09d-639e6ed250fb\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.861950 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd675614-41e7-40e1-b09d-639e6ed250fb-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg\" (UID: \"dd675614-41e7-40e1-b09d-639e6ed250fb\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.862060 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rfk6w\" (UniqueName: \"kubernetes.io/projected/dd675614-41e7-40e1-b09d-639e6ed250fb-kube-api-access-rfk6w\") 
pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg\" (UID: \"dd675614-41e7-40e1-b09d-639e6ed250fb\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.862119 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dd675614-41e7-40e1-b09d-639e6ed250fb-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg\" (UID: \"dd675614-41e7-40e1-b09d-639e6ed250fb\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.862180 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dd675614-41e7-40e1-b09d-639e6ed250fb-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg\" (UID: \"dd675614-41e7-40e1-b09d-639e6ed250fb\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.866935 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dd675614-41e7-40e1-b09d-639e6ed250fb-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg\" (UID: \"dd675614-41e7-40e1-b09d-639e6ed250fb\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.867332 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dd675614-41e7-40e1-b09d-639e6ed250fb-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg\" (UID: \"dd675614-41e7-40e1-b09d-639e6ed250fb\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.869936 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd675614-41e7-40e1-b09d-639e6ed250fb-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg\" (UID: \"dd675614-41e7-40e1-b09d-639e6ed250fb\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.878944 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rfk6w\" (UniqueName: \"kubernetes.io/projected/dd675614-41e7-40e1-b09d-639e6ed250fb-kube-api-access-rfk6w\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg\" (UID: \"dd675614-41e7-40e1-b09d-639e6ed250fb\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg" Dec 05 01:38:15 crc kubenswrapper[4665]: I1205 01:38:15.980675 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg" Dec 05 01:38:16 crc kubenswrapper[4665]: I1205 01:38:16.534575 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg"] Dec 05 01:38:17 crc kubenswrapper[4665]: I1205 01:38:17.522944 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg" event={"ID":"dd675614-41e7-40e1-b09d-639e6ed250fb","Type":"ContainerStarted","Data":"f130a1188b0c3ebb7a785e8b362e242ca6f5ffce5fb9e3d8ebc41ad81112e508"} Dec 05 01:38:17 crc kubenswrapper[4665]: I1205 01:38:17.523524 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg" event={"ID":"dd675614-41e7-40e1-b09d-639e6ed250fb","Type":"ContainerStarted","Data":"ce83604312b35436130fbe8a2813e031c349c234e93cf90edf99fd3584235eae"} Dec 05 01:38:17 crc kubenswrapper[4665]: I1205 01:38:17.549217 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg" podStartSLOduration=2.373358698 podStartE2EDuration="2.549182931s" podCreationTimestamp="2025-12-05 01:38:15 +0000 UTC" firstStartedPulling="2025-12-05 01:38:16.539403967 +0000 UTC m=+1671.878796266" lastFinishedPulling="2025-12-05 01:38:16.7152282 +0000 UTC m=+1672.054620499" observedRunningTime="2025-12-05 01:38:17.540421448 +0000 UTC m=+1672.879813757" watchObservedRunningTime="2025-12-05 01:38:17.549182931 +0000 UTC m=+1672.888575230" Dec 05 01:38:28 crc kubenswrapper[4665]: I1205 01:38:28.893771 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:38:28 crc kubenswrapper[4665]: E1205 01:38:28.894513 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:38:31 crc kubenswrapper[4665]: I1205 01:38:31.727795 4665 scope.go:117] "RemoveContainer" containerID="eb59fe1d1b872f3c8fc8b79d8d750264f43eac4650a8ce7a479a026dcaf2977e" Dec 05 01:38:31 crc kubenswrapper[4665]: I1205 01:38:31.750894 4665 scope.go:117] "RemoveContainer" containerID="1564ac14287cf31d6097d9826fb090d1656d825413fdc3a01781fda810787970" Dec 05 01:38:31 crc kubenswrapper[4665]: I1205 01:38:31.810887 4665 scope.go:117] "RemoveContainer" containerID="5c83a0ba54c84a14601b273a514ae7b3c9c644d5c392af546032fb2c3a6bd6b7" Dec 05 01:38:31 crc kubenswrapper[4665]: I1205 01:38:31.835937 4665 scope.go:117] "RemoveContainer" containerID="6f7bf95f01d7fe181f2675415ede9c1dfd8b91f7861a5467a34bb26e87d4d8b3" Dec 05 01:38:31 crc kubenswrapper[4665]: I1205 01:38:31.854775 4665 scope.go:117] "RemoveContainer" containerID="4262c2a6d6acc16e3edda86aafa5487037d44b0b9374b4972573af34a7f5279b" Dec 05 01:38:42 crc kubenswrapper[4665]: I1205 01:38:42.894787 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:38:42 crc kubenswrapper[4665]: E1205 01:38:42.895670 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: 
\"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:38:56 crc kubenswrapper[4665]: I1205 01:38:56.893474 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:38:56 crc kubenswrapper[4665]: E1205 01:38:56.894196 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:39:11 crc kubenswrapper[4665]: I1205 01:39:11.893644 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:39:11 crc kubenswrapper[4665]: E1205 01:39:11.894601 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:39:24 crc kubenswrapper[4665]: I1205 01:39:24.902783 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:39:24 crc kubenswrapper[4665]: E1205 01:39:24.903549 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:39:31 crc kubenswrapper[4665]: I1205 01:39:31.926139 4665 scope.go:117] "RemoveContainer" containerID="50163aad77345be6bf450a88e33395654a9c4ec4bb30ce71e409fdf2e02e09d2" Dec 05 01:39:31 crc kubenswrapper[4665]: I1205 01:39:31.947028 4665 scope.go:117] "RemoveContainer" containerID="4beef3c7d2d461d99f18a2ec654e3e26b1e5f78b1a90255d2bd4ba7feee12382" Dec 05 01:39:35 crc kubenswrapper[4665]: I1205 01:39:35.894599 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:39:35 crc kubenswrapper[4665]: E1205 01:39:35.895423 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:39:48 crc kubenswrapper[4665]: I1205 01:39:48.893813 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:39:48 crc kubenswrapper[4665]: 
E1205 01:39:48.894713 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:39:59 crc kubenswrapper[4665]: I1205 01:39:59.894056 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:39:59 crc kubenswrapper[4665]: E1205 01:39:59.894582 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:40:13 crc kubenswrapper[4665]: I1205 01:40:13.893625 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:40:13 crc kubenswrapper[4665]: E1205 01:40:13.894524 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:40:24 crc kubenswrapper[4665]: I1205 01:40:24.902350 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:40:24 crc kubenswrapper[4665]: E1205 01:40:24.904514 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:40:31 crc kubenswrapper[4665]: I1205 01:40:31.998203 4665 scope.go:117] "RemoveContainer" containerID="a6f87fdd628665a05f239acf16f82df5b6181712b6260719e3be4633c4dcbfe3" Dec 05 01:40:37 crc kubenswrapper[4665]: I1205 01:40:37.893757 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:40:37 crc kubenswrapper[4665]: E1205 01:40:37.895919 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:40:50 crc kubenswrapper[4665]: I1205 01:40:50.893340 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:40:50 crc kubenswrapper[4665]: E1205 
01:40:50.894907 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.400507 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-f8n8g"] Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.403130 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f8n8g" Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.420526 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f8n8g"] Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.474529 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84ffd979-df9b-4e65-9619-30d495ded451-catalog-content\") pod \"community-operators-f8n8g\" (UID: \"84ffd979-df9b-4e65-9619-30d495ded451\") " pod="openshift-marketplace/community-operators-f8n8g" Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.474773 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84ffd979-df9b-4e65-9619-30d495ded451-utilities\") pod \"community-operators-f8n8g\" (UID: \"84ffd979-df9b-4e65-9619-30d495ded451\") " pod="openshift-marketplace/community-operators-f8n8g" Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.475019 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gl2q2\" (UniqueName: \"kubernetes.io/projected/84ffd979-df9b-4e65-9619-30d495ded451-kube-api-access-gl2q2\") pod \"community-operators-f8n8g\" (UID: \"84ffd979-df9b-4e65-9619-30d495ded451\") " pod="openshift-marketplace/community-operators-f8n8g" Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.576814 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84ffd979-df9b-4e65-9619-30d495ded451-catalog-content\") pod \"community-operators-f8n8g\" (UID: \"84ffd979-df9b-4e65-9619-30d495ded451\") " pod="openshift-marketplace/community-operators-f8n8g" Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.576897 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84ffd979-df9b-4e65-9619-30d495ded451-utilities\") pod \"community-operators-f8n8g\" (UID: \"84ffd979-df9b-4e65-9619-30d495ded451\") " pod="openshift-marketplace/community-operators-f8n8g" Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.576944 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gl2q2\" (UniqueName: \"kubernetes.io/projected/84ffd979-df9b-4e65-9619-30d495ded451-kube-api-access-gl2q2\") pod \"community-operators-f8n8g\" (UID: \"84ffd979-df9b-4e65-9619-30d495ded451\") " pod="openshift-marketplace/community-operators-f8n8g" Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.577254 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84ffd979-df9b-4e65-9619-30d495ded451-catalog-content\") pod \"community-operators-f8n8g\" (UID: \"84ffd979-df9b-4e65-9619-30d495ded451\") " pod="openshift-marketplace/community-operators-f8n8g" Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.577416 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84ffd979-df9b-4e65-9619-30d495ded451-utilities\") pod \"community-operators-f8n8g\" (UID: \"84ffd979-df9b-4e65-9619-30d495ded451\") " pod="openshift-marketplace/community-operators-f8n8g" Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.594328 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7bf4z"] Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.596192 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7bf4z" Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.625724 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gl2q2\" (UniqueName: \"kubernetes.io/projected/84ffd979-df9b-4e65-9619-30d495ded451-kube-api-access-gl2q2\") pod \"community-operators-f8n8g\" (UID: \"84ffd979-df9b-4e65-9619-30d495ded451\") " pod="openshift-marketplace/community-operators-f8n8g" Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.629549 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7bf4z"] Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.773090 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f8n8g" Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.780082 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8c9de4b-5255-4941-a97e-0458605df194-utilities\") pod \"redhat-marketplace-7bf4z\" (UID: \"d8c9de4b-5255-4941-a97e-0458605df194\") " pod="openshift-marketplace/redhat-marketplace-7bf4z" Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.780908 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8c9de4b-5255-4941-a97e-0458605df194-catalog-content\") pod \"redhat-marketplace-7bf4z\" (UID: \"d8c9de4b-5255-4941-a97e-0458605df194\") " pod="openshift-marketplace/redhat-marketplace-7bf4z" Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.781137 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-khmxb\" (UniqueName: \"kubernetes.io/projected/d8c9de4b-5255-4941-a97e-0458605df194-kube-api-access-khmxb\") pod \"redhat-marketplace-7bf4z\" (UID: \"d8c9de4b-5255-4941-a97e-0458605df194\") " pod="openshift-marketplace/redhat-marketplace-7bf4z" Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.882933 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8c9de4b-5255-4941-a97e-0458605df194-utilities\") pod \"redhat-marketplace-7bf4z\" (UID: \"d8c9de4b-5255-4941-a97e-0458605df194\") " pod="openshift-marketplace/redhat-marketplace-7bf4z" Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.883283 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/d8c9de4b-5255-4941-a97e-0458605df194-catalog-content\") pod \"redhat-marketplace-7bf4z\" (UID: \"d8c9de4b-5255-4941-a97e-0458605df194\") " pod="openshift-marketplace/redhat-marketplace-7bf4z" Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.883397 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8c9de4b-5255-4941-a97e-0458605df194-utilities\") pod \"redhat-marketplace-7bf4z\" (UID: \"d8c9de4b-5255-4941-a97e-0458605df194\") " pod="openshift-marketplace/redhat-marketplace-7bf4z" Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.883606 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-khmxb\" (UniqueName: \"kubernetes.io/projected/d8c9de4b-5255-4941-a97e-0458605df194-kube-api-access-khmxb\") pod \"redhat-marketplace-7bf4z\" (UID: \"d8c9de4b-5255-4941-a97e-0458605df194\") " pod="openshift-marketplace/redhat-marketplace-7bf4z" Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.883636 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8c9de4b-5255-4941-a97e-0458605df194-catalog-content\") pod \"redhat-marketplace-7bf4z\" (UID: \"d8c9de4b-5255-4941-a97e-0458605df194\") " pod="openshift-marketplace/redhat-marketplace-7bf4z" Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.902914 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-khmxb\" (UniqueName: \"kubernetes.io/projected/d8c9de4b-5255-4941-a97e-0458605df194-kube-api-access-khmxb\") pod \"redhat-marketplace-7bf4z\" (UID: \"d8c9de4b-5255-4941-a97e-0458605df194\") " pod="openshift-marketplace/redhat-marketplace-7bf4z" Dec 05 01:40:59 crc kubenswrapper[4665]: I1205 01:40:59.965603 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7bf4z" Dec 05 01:41:00 crc kubenswrapper[4665]: I1205 01:41:00.301158 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f8n8g"] Dec 05 01:41:00 crc kubenswrapper[4665]: I1205 01:41:00.455247 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7bf4z"] Dec 05 01:41:00 crc kubenswrapper[4665]: W1205 01:41:00.459479 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd8c9de4b_5255_4941_a97e_0458605df194.slice/crio-7b8d62e55498277762cf66702e70562a8a4ee578016f8995f4cd34d61f024531 WatchSource:0}: Error finding container 7b8d62e55498277762cf66702e70562a8a4ee578016f8995f4cd34d61f024531: Status 404 returned error can't find the container with id 7b8d62e55498277762cf66702e70562a8a4ee578016f8995f4cd34d61f024531 Dec 05 01:41:01 crc kubenswrapper[4665]: I1205 01:41:01.095396 4665 generic.go:334] "Generic (PLEG): container finished" podID="84ffd979-df9b-4e65-9619-30d495ded451" containerID="ab3b6850dd6390b3d1e818e364f010f63228624fb291f420abf8199afa67da31" exitCode=0 Dec 05 01:41:01 crc kubenswrapper[4665]: I1205 01:41:01.095504 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f8n8g" event={"ID":"84ffd979-df9b-4e65-9619-30d495ded451","Type":"ContainerDied","Data":"ab3b6850dd6390b3d1e818e364f010f63228624fb291f420abf8199afa67da31"} Dec 05 01:41:01 crc kubenswrapper[4665]: I1205 01:41:01.095791 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f8n8g" event={"ID":"84ffd979-df9b-4e65-9619-30d495ded451","Type":"ContainerStarted","Data":"358eb1a09fbb3f6f65018d894dd93736cf62e391c2782c40f377750a3fdd6c26"} Dec 05 01:41:01 crc kubenswrapper[4665]: I1205 01:41:01.097641 4665 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 01:41:01 crc kubenswrapper[4665]: I1205 01:41:01.097709 4665 generic.go:334] "Generic (PLEG): container finished" podID="d8c9de4b-5255-4941-a97e-0458605df194" containerID="cde698a1192622024389bbd184d71e5e933d6d80622f879275a3b284c740504d" exitCode=0 Dec 05 01:41:01 crc kubenswrapper[4665]: I1205 01:41:01.097743 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7bf4z" event={"ID":"d8c9de4b-5255-4941-a97e-0458605df194","Type":"ContainerDied","Data":"cde698a1192622024389bbd184d71e5e933d6d80622f879275a3b284c740504d"} Dec 05 01:41:01 crc kubenswrapper[4665]: I1205 01:41:01.097766 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7bf4z" event={"ID":"d8c9de4b-5255-4941-a97e-0458605df194","Type":"ContainerStarted","Data":"7b8d62e55498277762cf66702e70562a8a4ee578016f8995f4cd34d61f024531"} Dec 05 01:41:02 crc kubenswrapper[4665]: I1205 01:41:02.016354 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-stnml"] Dec 05 01:41:02 crc kubenswrapper[4665]: I1205 01:41:02.018699 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-stnml" Dec 05 01:41:02 crc kubenswrapper[4665]: I1205 01:41:02.030132 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-stnml"] Dec 05 01:41:02 crc kubenswrapper[4665]: I1205 01:41:02.114623 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7bf4z" event={"ID":"d8c9de4b-5255-4941-a97e-0458605df194","Type":"ContainerStarted","Data":"35b4c9356abef9199c3ed0487551670931f7abd9c98cbdd886b8157c753ef354"} Dec 05 01:41:02 crc kubenswrapper[4665]: I1205 01:41:02.160385 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rhlmj\" (UniqueName: \"kubernetes.io/projected/e516cd1d-2aec-4eab-857d-635f8c4869cc-kube-api-access-rhlmj\") pod \"certified-operators-stnml\" (UID: \"e516cd1d-2aec-4eab-857d-635f8c4869cc\") " pod="openshift-marketplace/certified-operators-stnml" Dec 05 01:41:02 crc kubenswrapper[4665]: I1205 01:41:02.160447 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e516cd1d-2aec-4eab-857d-635f8c4869cc-utilities\") pod \"certified-operators-stnml\" (UID: \"e516cd1d-2aec-4eab-857d-635f8c4869cc\") " pod="openshift-marketplace/certified-operators-stnml" Dec 05 01:41:02 crc kubenswrapper[4665]: I1205 01:41:02.160476 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e516cd1d-2aec-4eab-857d-635f8c4869cc-catalog-content\") pod \"certified-operators-stnml\" (UID: \"e516cd1d-2aec-4eab-857d-635f8c4869cc\") " pod="openshift-marketplace/certified-operators-stnml" Dec 05 01:41:02 crc kubenswrapper[4665]: I1205 01:41:02.262221 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e516cd1d-2aec-4eab-857d-635f8c4869cc-catalog-content\") pod \"certified-operators-stnml\" (UID: \"e516cd1d-2aec-4eab-857d-635f8c4869cc\") " pod="openshift-marketplace/certified-operators-stnml" Dec 05 01:41:02 crc kubenswrapper[4665]: I1205 01:41:02.262405 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rhlmj\" (UniqueName: \"kubernetes.io/projected/e516cd1d-2aec-4eab-857d-635f8c4869cc-kube-api-access-rhlmj\") pod \"certified-operators-stnml\" (UID: \"e516cd1d-2aec-4eab-857d-635f8c4869cc\") " pod="openshift-marketplace/certified-operators-stnml" Dec 05 01:41:02 crc kubenswrapper[4665]: I1205 01:41:02.262453 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e516cd1d-2aec-4eab-857d-635f8c4869cc-utilities\") pod \"certified-operators-stnml\" (UID: \"e516cd1d-2aec-4eab-857d-635f8c4869cc\") " pod="openshift-marketplace/certified-operators-stnml" Dec 05 01:41:02 crc kubenswrapper[4665]: I1205 01:41:02.262866 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e516cd1d-2aec-4eab-857d-635f8c4869cc-utilities\") pod \"certified-operators-stnml\" (UID: \"e516cd1d-2aec-4eab-857d-635f8c4869cc\") " pod="openshift-marketplace/certified-operators-stnml" Dec 05 01:41:02 crc kubenswrapper[4665]: I1205 01:41:02.263122 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/e516cd1d-2aec-4eab-857d-635f8c4869cc-catalog-content\") pod \"certified-operators-stnml\" (UID: \"e516cd1d-2aec-4eab-857d-635f8c4869cc\") " pod="openshift-marketplace/certified-operators-stnml" Dec 05 01:41:02 crc kubenswrapper[4665]: I1205 01:41:02.284943 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rhlmj\" (UniqueName: \"kubernetes.io/projected/e516cd1d-2aec-4eab-857d-635f8c4869cc-kube-api-access-rhlmj\") pod \"certified-operators-stnml\" (UID: \"e516cd1d-2aec-4eab-857d-635f8c4869cc\") " pod="openshift-marketplace/certified-operators-stnml" Dec 05 01:41:02 crc kubenswrapper[4665]: I1205 01:41:02.436503 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-stnml" Dec 05 01:41:02 crc kubenswrapper[4665]: I1205 01:41:02.895109 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:41:02 crc kubenswrapper[4665]: E1205 01:41:02.895693 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:41:03 crc kubenswrapper[4665]: I1205 01:41:03.008649 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-stnml"] Dec 05 01:41:03 crc kubenswrapper[4665]: I1205 01:41:03.087962 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-ng2qn"] Dec 05 01:41:03 crc kubenswrapper[4665]: I1205 01:41:03.112627 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-ng2qn"] Dec 05 01:41:03 crc kubenswrapper[4665]: I1205 01:41:03.136109 4665 generic.go:334] "Generic (PLEG): container finished" podID="d8c9de4b-5255-4941-a97e-0458605df194" containerID="35b4c9356abef9199c3ed0487551670931f7abd9c98cbdd886b8157c753ef354" exitCode=0 Dec 05 01:41:03 crc kubenswrapper[4665]: I1205 01:41:03.136195 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7bf4z" event={"ID":"d8c9de4b-5255-4941-a97e-0458605df194","Type":"ContainerDied","Data":"35b4c9356abef9199c3ed0487551670931f7abd9c98cbdd886b8157c753ef354"} Dec 05 01:41:03 crc kubenswrapper[4665]: I1205 01:41:03.142002 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f8n8g" event={"ID":"84ffd979-df9b-4e65-9619-30d495ded451","Type":"ContainerStarted","Data":"7f765c4bfff100c6d4d6ee8d440a7f186e9be07cbc086a29e3907b0edb815d18"} Dec 05 01:41:03 crc kubenswrapper[4665]: I1205 01:41:03.143882 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-stnml" event={"ID":"e516cd1d-2aec-4eab-857d-635f8c4869cc","Type":"ContainerStarted","Data":"4cf89e80dcaa102e9f01c5bdc325ec015791605e2bae83a07958d55ffe060297"} Dec 05 01:41:04 crc kubenswrapper[4665]: I1205 01:41:04.154463 4665 generic.go:334] "Generic (PLEG): container finished" podID="e516cd1d-2aec-4eab-857d-635f8c4869cc" containerID="de1c06f1c54d73b7ea3502b655b07c91217fa69bb6ea2e7803e09cc91fa1971f" exitCode=0 Dec 05 01:41:04 crc kubenswrapper[4665]: I1205 01:41:04.154541 4665 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-stnml" event={"ID":"e516cd1d-2aec-4eab-857d-635f8c4869cc","Type":"ContainerDied","Data":"de1c06f1c54d73b7ea3502b655b07c91217fa69bb6ea2e7803e09cc91fa1971f"} Dec 05 01:41:04 crc kubenswrapper[4665]: I1205 01:41:04.157036 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7bf4z" event={"ID":"d8c9de4b-5255-4941-a97e-0458605df194","Type":"ContainerStarted","Data":"246b1d61cbb9821d9ab814b8ad25bf469102ef15c3aa626c5dc2c092eb1d61f0"} Dec 05 01:41:04 crc kubenswrapper[4665]: I1205 01:41:04.192800 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7bf4z" podStartSLOduration=2.679134693 podStartE2EDuration="5.192770283s" podCreationTimestamp="2025-12-05 01:40:59 +0000 UTC" firstStartedPulling="2025-12-05 01:41:01.100855229 +0000 UTC m=+1836.440247518" lastFinishedPulling="2025-12-05 01:41:03.614490809 +0000 UTC m=+1838.953883108" observedRunningTime="2025-12-05 01:41:04.191612244 +0000 UTC m=+1839.531004553" watchObservedRunningTime="2025-12-05 01:41:04.192770283 +0000 UTC m=+1839.532162582" Dec 05 01:41:04 crc kubenswrapper[4665]: I1205 01:41:04.905095 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b" path="/var/lib/kubelet/pods/fb936364-9f9d-41a9-bdc6-1c19ab8e0b1b/volumes" Dec 05 01:41:05 crc kubenswrapper[4665]: I1205 01:41:05.168683 4665 generic.go:334] "Generic (PLEG): container finished" podID="84ffd979-df9b-4e65-9619-30d495ded451" containerID="7f765c4bfff100c6d4d6ee8d440a7f186e9be07cbc086a29e3907b0edb815d18" exitCode=0 Dec 05 01:41:05 crc kubenswrapper[4665]: I1205 01:41:05.168725 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f8n8g" event={"ID":"84ffd979-df9b-4e65-9619-30d495ded451","Type":"ContainerDied","Data":"7f765c4bfff100c6d4d6ee8d440a7f186e9be07cbc086a29e3907b0edb815d18"} Dec 05 01:41:05 crc kubenswrapper[4665]: I1205 01:41:05.175736 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-stnml" event={"ID":"e516cd1d-2aec-4eab-857d-635f8c4869cc","Type":"ContainerStarted","Data":"ae82a58bb8cb055a059422f7ff9b13a5d926e695260851d6bc725a01c5b66192"} Dec 05 01:41:06 crc kubenswrapper[4665]: I1205 01:41:06.039704 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-243a-account-create-update-kb9j2"] Dec 05 01:41:06 crc kubenswrapper[4665]: I1205 01:41:06.057435 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-243a-account-create-update-kb9j2"] Dec 05 01:41:06 crc kubenswrapper[4665]: I1205 01:41:06.188180 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f8n8g" event={"ID":"84ffd979-df9b-4e65-9619-30d495ded451","Type":"ContainerStarted","Data":"d508f0ebb7fe7651f6f9f43005bcb83c84e30131f3ba8fcad9509314468ee633"} Dec 05 01:41:06 crc kubenswrapper[4665]: I1205 01:41:06.191655 4665 generic.go:334] "Generic (PLEG): container finished" podID="e516cd1d-2aec-4eab-857d-635f8c4869cc" containerID="ae82a58bb8cb055a059422f7ff9b13a5d926e695260851d6bc725a01c5b66192" exitCode=0 Dec 05 01:41:06 crc kubenswrapper[4665]: I1205 01:41:06.191699 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-stnml" 
event={"ID":"e516cd1d-2aec-4eab-857d-635f8c4869cc","Type":"ContainerDied","Data":"ae82a58bb8cb055a059422f7ff9b13a5d926e695260851d6bc725a01c5b66192"} Dec 05 01:41:06 crc kubenswrapper[4665]: I1205 01:41:06.220894 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-f8n8g" podStartSLOduration=2.611846071 podStartE2EDuration="7.220873133s" podCreationTimestamp="2025-12-05 01:40:59 +0000 UTC" firstStartedPulling="2025-12-05 01:41:01.097337713 +0000 UTC m=+1836.436730012" lastFinishedPulling="2025-12-05 01:41:05.706364775 +0000 UTC m=+1841.045757074" observedRunningTime="2025-12-05 01:41:06.214531568 +0000 UTC m=+1841.553923877" watchObservedRunningTime="2025-12-05 01:41:06.220873133 +0000 UTC m=+1841.560265432" Dec 05 01:41:06 crc kubenswrapper[4665]: I1205 01:41:06.903483 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="82bcac28-3f91-477d-a370-c510d062b2eb" path="/var/lib/kubelet/pods/82bcac28-3f91-477d-a370-c510d062b2eb/volumes" Dec 05 01:41:07 crc kubenswrapper[4665]: I1205 01:41:07.039990 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-98hr7"] Dec 05 01:41:07 crc kubenswrapper[4665]: I1205 01:41:07.056487 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-98hr7"] Dec 05 01:41:07 crc kubenswrapper[4665]: I1205 01:41:07.066742 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-cgc75"] Dec 05 01:41:07 crc kubenswrapper[4665]: I1205 01:41:07.079578 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-cc5a-account-create-update-9lwsp"] Dec 05 01:41:07 crc kubenswrapper[4665]: I1205 01:41:07.095266 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-cgc75"] Dec 05 01:41:07 crc kubenswrapper[4665]: I1205 01:41:07.104377 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-e5ee-account-create-update-sq82x"] Dec 05 01:41:07 crc kubenswrapper[4665]: I1205 01:41:07.117366 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-cc5a-account-create-update-9lwsp"] Dec 05 01:41:07 crc kubenswrapper[4665]: I1205 01:41:07.134323 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-e5ee-account-create-update-sq82x"] Dec 05 01:41:08 crc kubenswrapper[4665]: I1205 01:41:08.220149 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-stnml" event={"ID":"e516cd1d-2aec-4eab-857d-635f8c4869cc","Type":"ContainerStarted","Data":"8cd223d0070a169fd916344eedf717a8e3360b1e4617802d0205ebe15919eade"} Dec 05 01:41:08 crc kubenswrapper[4665]: I1205 01:41:08.243448 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-stnml" podStartSLOduration=4.523395679 podStartE2EDuration="7.243429167s" podCreationTimestamp="2025-12-05 01:41:01 +0000 UTC" firstStartedPulling="2025-12-05 01:41:04.292053226 +0000 UTC m=+1839.631445525" lastFinishedPulling="2025-12-05 01:41:07.012086724 +0000 UTC m=+1842.351479013" observedRunningTime="2025-12-05 01:41:08.237075653 +0000 UTC m=+1843.576467952" watchObservedRunningTime="2025-12-05 01:41:08.243429167 +0000 UTC m=+1843.582821466" Dec 05 01:41:08 crc kubenswrapper[4665]: I1205 01:41:08.904462 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ef8c440-1a96-47cd-a75a-5d4df00edda5" 
path="/var/lib/kubelet/pods/4ef8c440-1a96-47cd-a75a-5d4df00edda5/volumes" Dec 05 01:41:08 crc kubenswrapper[4665]: I1205 01:41:08.905260 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42" path="/var/lib/kubelet/pods/9f5c28dd-5ab6-4dd1-ad3e-ea0858296e42/volumes" Dec 05 01:41:08 crc kubenswrapper[4665]: I1205 01:41:08.906000 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0780418-7936-4123-9a41-b3a7e6b22b9d" path="/var/lib/kubelet/pods/a0780418-7936-4123-9a41-b3a7e6b22b9d/volumes" Dec 05 01:41:08 crc kubenswrapper[4665]: I1205 01:41:08.906885 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f748e17a-771b-4153-ab91-1e105e392917" path="/var/lib/kubelet/pods/f748e17a-771b-4153-ab91-1e105e392917/volumes" Dec 05 01:41:09 crc kubenswrapper[4665]: I1205 01:41:09.774347 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-f8n8g" Dec 05 01:41:09 crc kubenswrapper[4665]: I1205 01:41:09.774687 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-f8n8g" Dec 05 01:41:09 crc kubenswrapper[4665]: I1205 01:41:09.967564 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7bf4z" Dec 05 01:41:09 crc kubenswrapper[4665]: I1205 01:41:09.967619 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7bf4z" Dec 05 01:41:10 crc kubenswrapper[4665]: I1205 01:41:10.008620 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7bf4z" Dec 05 01:41:10 crc kubenswrapper[4665]: I1205 01:41:10.291421 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7bf4z" Dec 05 01:41:10 crc kubenswrapper[4665]: I1205 01:41:10.814498 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-f8n8g" podUID="84ffd979-df9b-4e65-9619-30d495ded451" containerName="registry-server" probeResult="failure" output=< Dec 05 01:41:10 crc kubenswrapper[4665]: timeout: failed to connect service ":50051" within 1s Dec 05 01:41:10 crc kubenswrapper[4665]: > Dec 05 01:41:11 crc kubenswrapper[4665]: I1205 01:41:11.988143 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7bf4z"] Dec 05 01:41:12 crc kubenswrapper[4665]: I1205 01:41:12.263768 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7bf4z" podUID="d8c9de4b-5255-4941-a97e-0458605df194" containerName="registry-server" containerID="cri-o://246b1d61cbb9821d9ab814b8ad25bf469102ef15c3aa626c5dc2c092eb1d61f0" gracePeriod=2 Dec 05 01:41:12 crc kubenswrapper[4665]: I1205 01:41:12.437096 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-stnml" Dec 05 01:41:12 crc kubenswrapper[4665]: I1205 01:41:12.437467 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-stnml" Dec 05 01:41:12 crc kubenswrapper[4665]: I1205 01:41:12.488308 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-stnml" Dec 05 01:41:12 crc kubenswrapper[4665]: I1205 01:41:12.739775 
4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7bf4z" Dec 05 01:41:12 crc kubenswrapper[4665]: I1205 01:41:12.895548 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8c9de4b-5255-4941-a97e-0458605df194-utilities\") pod \"d8c9de4b-5255-4941-a97e-0458605df194\" (UID: \"d8c9de4b-5255-4941-a97e-0458605df194\") " Dec 05 01:41:12 crc kubenswrapper[4665]: I1205 01:41:12.895629 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-khmxb\" (UniqueName: \"kubernetes.io/projected/d8c9de4b-5255-4941-a97e-0458605df194-kube-api-access-khmxb\") pod \"d8c9de4b-5255-4941-a97e-0458605df194\" (UID: \"d8c9de4b-5255-4941-a97e-0458605df194\") " Dec 05 01:41:12 crc kubenswrapper[4665]: I1205 01:41:12.895695 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8c9de4b-5255-4941-a97e-0458605df194-catalog-content\") pod \"d8c9de4b-5255-4941-a97e-0458605df194\" (UID: \"d8c9de4b-5255-4941-a97e-0458605df194\") " Dec 05 01:41:12 crc kubenswrapper[4665]: I1205 01:41:12.896897 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8c9de4b-5255-4941-a97e-0458605df194-utilities" (OuterVolumeSpecName: "utilities") pod "d8c9de4b-5255-4941-a97e-0458605df194" (UID: "d8c9de4b-5255-4941-a97e-0458605df194"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:41:12 crc kubenswrapper[4665]: I1205 01:41:12.906936 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d8c9de4b-5255-4941-a97e-0458605df194-kube-api-access-khmxb" (OuterVolumeSpecName: "kube-api-access-khmxb") pod "d8c9de4b-5255-4941-a97e-0458605df194" (UID: "d8c9de4b-5255-4941-a97e-0458605df194"). InnerVolumeSpecName "kube-api-access-khmxb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:41:12 crc kubenswrapper[4665]: I1205 01:41:12.919815 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d8c9de4b-5255-4941-a97e-0458605df194-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d8c9de4b-5255-4941-a97e-0458605df194" (UID: "d8c9de4b-5255-4941-a97e-0458605df194"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:41:12 crc kubenswrapper[4665]: I1205 01:41:12.997625 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d8c9de4b-5255-4941-a97e-0458605df194-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 01:41:12 crc kubenswrapper[4665]: I1205 01:41:12.997654 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-khmxb\" (UniqueName: \"kubernetes.io/projected/d8c9de4b-5255-4941-a97e-0458605df194-kube-api-access-khmxb\") on node \"crc\" DevicePath \"\"" Dec 05 01:41:12 crc kubenswrapper[4665]: I1205 01:41:12.997665 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d8c9de4b-5255-4941-a97e-0458605df194-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 01:41:13 crc kubenswrapper[4665]: I1205 01:41:13.274062 4665 generic.go:334] "Generic (PLEG): container finished" podID="d8c9de4b-5255-4941-a97e-0458605df194" containerID="246b1d61cbb9821d9ab814b8ad25bf469102ef15c3aa626c5dc2c092eb1d61f0" exitCode=0 Dec 05 01:41:13 crc kubenswrapper[4665]: I1205 01:41:13.274143 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7bf4z" event={"ID":"d8c9de4b-5255-4941-a97e-0458605df194","Type":"ContainerDied","Data":"246b1d61cbb9821d9ab814b8ad25bf469102ef15c3aa626c5dc2c092eb1d61f0"} Dec 05 01:41:13 crc kubenswrapper[4665]: I1205 01:41:13.274204 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7bf4z" event={"ID":"d8c9de4b-5255-4941-a97e-0458605df194","Type":"ContainerDied","Data":"7b8d62e55498277762cf66702e70562a8a4ee578016f8995f4cd34d61f024531"} Dec 05 01:41:13 crc kubenswrapper[4665]: I1205 01:41:13.274229 4665 scope.go:117] "RemoveContainer" containerID="246b1d61cbb9821d9ab814b8ad25bf469102ef15c3aa626c5dc2c092eb1d61f0" Dec 05 01:41:13 crc kubenswrapper[4665]: I1205 01:41:13.274165 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7bf4z" Dec 05 01:41:13 crc kubenswrapper[4665]: I1205 01:41:13.309859 4665 scope.go:117] "RemoveContainer" containerID="35b4c9356abef9199c3ed0487551670931f7abd9c98cbdd886b8157c753ef354" Dec 05 01:41:13 crc kubenswrapper[4665]: I1205 01:41:13.322974 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7bf4z"] Dec 05 01:41:13 crc kubenswrapper[4665]: I1205 01:41:13.332946 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7bf4z"] Dec 05 01:41:13 crc kubenswrapper[4665]: I1205 01:41:13.337378 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-stnml" Dec 05 01:41:13 crc kubenswrapper[4665]: I1205 01:41:13.337872 4665 scope.go:117] "RemoveContainer" containerID="cde698a1192622024389bbd184d71e5e933d6d80622f879275a3b284c740504d" Dec 05 01:41:13 crc kubenswrapper[4665]: I1205 01:41:13.386186 4665 scope.go:117] "RemoveContainer" containerID="246b1d61cbb9821d9ab814b8ad25bf469102ef15c3aa626c5dc2c092eb1d61f0" Dec 05 01:41:13 crc kubenswrapper[4665]: E1205 01:41:13.388182 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"246b1d61cbb9821d9ab814b8ad25bf469102ef15c3aa626c5dc2c092eb1d61f0\": container with ID starting with 246b1d61cbb9821d9ab814b8ad25bf469102ef15c3aa626c5dc2c092eb1d61f0 not found: ID does not exist" containerID="246b1d61cbb9821d9ab814b8ad25bf469102ef15c3aa626c5dc2c092eb1d61f0" Dec 05 01:41:13 crc kubenswrapper[4665]: I1205 01:41:13.388239 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"246b1d61cbb9821d9ab814b8ad25bf469102ef15c3aa626c5dc2c092eb1d61f0"} err="failed to get container status \"246b1d61cbb9821d9ab814b8ad25bf469102ef15c3aa626c5dc2c092eb1d61f0\": rpc error: code = NotFound desc = could not find container \"246b1d61cbb9821d9ab814b8ad25bf469102ef15c3aa626c5dc2c092eb1d61f0\": container with ID starting with 246b1d61cbb9821d9ab814b8ad25bf469102ef15c3aa626c5dc2c092eb1d61f0 not found: ID does not exist" Dec 05 01:41:13 crc kubenswrapper[4665]: I1205 01:41:13.388410 4665 scope.go:117] "RemoveContainer" containerID="35b4c9356abef9199c3ed0487551670931f7abd9c98cbdd886b8157c753ef354" Dec 05 01:41:13 crc kubenswrapper[4665]: E1205 01:41:13.388800 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35b4c9356abef9199c3ed0487551670931f7abd9c98cbdd886b8157c753ef354\": container with ID starting with 35b4c9356abef9199c3ed0487551670931f7abd9c98cbdd886b8157c753ef354 not found: ID does not exist" containerID="35b4c9356abef9199c3ed0487551670931f7abd9c98cbdd886b8157c753ef354" Dec 05 01:41:13 crc kubenswrapper[4665]: I1205 01:41:13.388882 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35b4c9356abef9199c3ed0487551670931f7abd9c98cbdd886b8157c753ef354"} err="failed to get container status \"35b4c9356abef9199c3ed0487551670931f7abd9c98cbdd886b8157c753ef354\": rpc error: code = NotFound desc = could not find container \"35b4c9356abef9199c3ed0487551670931f7abd9c98cbdd886b8157c753ef354\": container with ID starting with 35b4c9356abef9199c3ed0487551670931f7abd9c98cbdd886b8157c753ef354 not found: ID does not exist" Dec 05 01:41:13 crc kubenswrapper[4665]: I1205 01:41:13.388922 4665 scope.go:117] "RemoveContainer" 
containerID="cde698a1192622024389bbd184d71e5e933d6d80622f879275a3b284c740504d" Dec 05 01:41:13 crc kubenswrapper[4665]: E1205 01:41:13.390248 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cde698a1192622024389bbd184d71e5e933d6d80622f879275a3b284c740504d\": container with ID starting with cde698a1192622024389bbd184d71e5e933d6d80622f879275a3b284c740504d not found: ID does not exist" containerID="cde698a1192622024389bbd184d71e5e933d6d80622f879275a3b284c740504d" Dec 05 01:41:13 crc kubenswrapper[4665]: I1205 01:41:13.390268 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cde698a1192622024389bbd184d71e5e933d6d80622f879275a3b284c740504d"} err="failed to get container status \"cde698a1192622024389bbd184d71e5e933d6d80622f879275a3b284c740504d\": rpc error: code = NotFound desc = could not find container \"cde698a1192622024389bbd184d71e5e933d6d80622f879275a3b284c740504d\": container with ID starting with cde698a1192622024389bbd184d71e5e933d6d80622f879275a3b284c740504d not found: ID does not exist" Dec 05 01:41:14 crc kubenswrapper[4665]: I1205 01:41:14.779724 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-stnml"] Dec 05 01:41:14 crc kubenswrapper[4665]: I1205 01:41:14.906008 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d8c9de4b-5255-4941-a97e-0458605df194" path="/var/lib/kubelet/pods/d8c9de4b-5255-4941-a97e-0458605df194/volumes" Dec 05 01:41:15 crc kubenswrapper[4665]: I1205 01:41:15.292250 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-stnml" podUID="e516cd1d-2aec-4eab-857d-635f8c4869cc" containerName="registry-server" containerID="cri-o://8cd223d0070a169fd916344eedf717a8e3360b1e4617802d0205ebe15919eade" gracePeriod=2 Dec 05 01:41:15 crc kubenswrapper[4665]: I1205 01:41:15.752635 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-stnml" Dec 05 01:41:15 crc kubenswrapper[4665]: I1205 01:41:15.864987 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e516cd1d-2aec-4eab-857d-635f8c4869cc-catalog-content\") pod \"e516cd1d-2aec-4eab-857d-635f8c4869cc\" (UID: \"e516cd1d-2aec-4eab-857d-635f8c4869cc\") " Dec 05 01:41:15 crc kubenswrapper[4665]: I1205 01:41:15.865101 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rhlmj\" (UniqueName: \"kubernetes.io/projected/e516cd1d-2aec-4eab-857d-635f8c4869cc-kube-api-access-rhlmj\") pod \"e516cd1d-2aec-4eab-857d-635f8c4869cc\" (UID: \"e516cd1d-2aec-4eab-857d-635f8c4869cc\") " Dec 05 01:41:15 crc kubenswrapper[4665]: I1205 01:41:15.865312 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e516cd1d-2aec-4eab-857d-635f8c4869cc-utilities\") pod \"e516cd1d-2aec-4eab-857d-635f8c4869cc\" (UID: \"e516cd1d-2aec-4eab-857d-635f8c4869cc\") " Dec 05 01:41:15 crc kubenswrapper[4665]: I1205 01:41:15.866145 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e516cd1d-2aec-4eab-857d-635f8c4869cc-utilities" (OuterVolumeSpecName: "utilities") pod "e516cd1d-2aec-4eab-857d-635f8c4869cc" (UID: "e516cd1d-2aec-4eab-857d-635f8c4869cc"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:41:15 crc kubenswrapper[4665]: I1205 01:41:15.870651 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e516cd1d-2aec-4eab-857d-635f8c4869cc-kube-api-access-rhlmj" (OuterVolumeSpecName: "kube-api-access-rhlmj") pod "e516cd1d-2aec-4eab-857d-635f8c4869cc" (UID: "e516cd1d-2aec-4eab-857d-635f8c4869cc"). InnerVolumeSpecName "kube-api-access-rhlmj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:41:15 crc kubenswrapper[4665]: I1205 01:41:15.894631 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:41:15 crc kubenswrapper[4665]: E1205 01:41:15.895378 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:41:15 crc kubenswrapper[4665]: I1205 01:41:15.908603 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e516cd1d-2aec-4eab-857d-635f8c4869cc-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e516cd1d-2aec-4eab-857d-635f8c4869cc" (UID: "e516cd1d-2aec-4eab-857d-635f8c4869cc"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:41:15 crc kubenswrapper[4665]: I1205 01:41:15.970215 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e516cd1d-2aec-4eab-857d-635f8c4869cc-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 01:41:15 crc kubenswrapper[4665]: I1205 01:41:15.970266 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rhlmj\" (UniqueName: \"kubernetes.io/projected/e516cd1d-2aec-4eab-857d-635f8c4869cc-kube-api-access-rhlmj\") on node \"crc\" DevicePath \"\"" Dec 05 01:41:15 crc kubenswrapper[4665]: I1205 01:41:15.970288 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e516cd1d-2aec-4eab-857d-635f8c4869cc-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 01:41:16 crc kubenswrapper[4665]: I1205 01:41:16.307693 4665 generic.go:334] "Generic (PLEG): container finished" podID="e516cd1d-2aec-4eab-857d-635f8c4869cc" containerID="8cd223d0070a169fd916344eedf717a8e3360b1e4617802d0205ebe15919eade" exitCode=0 Dec 05 01:41:16 crc kubenswrapper[4665]: I1205 01:41:16.307858 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-stnml" Dec 05 01:41:16 crc kubenswrapper[4665]: I1205 01:41:16.308010 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-stnml" event={"ID":"e516cd1d-2aec-4eab-857d-635f8c4869cc","Type":"ContainerDied","Data":"8cd223d0070a169fd916344eedf717a8e3360b1e4617802d0205ebe15919eade"} Dec 05 01:41:16 crc kubenswrapper[4665]: I1205 01:41:16.308051 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-stnml" event={"ID":"e516cd1d-2aec-4eab-857d-635f8c4869cc","Type":"ContainerDied","Data":"4cf89e80dcaa102e9f01c5bdc325ec015791605e2bae83a07958d55ffe060297"} Dec 05 01:41:16 crc kubenswrapper[4665]: I1205 01:41:16.308187 4665 scope.go:117] "RemoveContainer" containerID="8cd223d0070a169fd916344eedf717a8e3360b1e4617802d0205ebe15919eade" Dec 05 01:41:16 crc kubenswrapper[4665]: I1205 01:41:16.344281 4665 scope.go:117] "RemoveContainer" containerID="ae82a58bb8cb055a059422f7ff9b13a5d926e695260851d6bc725a01c5b66192" Dec 05 01:41:16 crc kubenswrapper[4665]: I1205 01:41:16.346485 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-stnml"] Dec 05 01:41:16 crc kubenswrapper[4665]: I1205 01:41:16.363172 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-stnml"] Dec 05 01:41:16 crc kubenswrapper[4665]: I1205 01:41:16.365335 4665 scope.go:117] "RemoveContainer" containerID="de1c06f1c54d73b7ea3502b655b07c91217fa69bb6ea2e7803e09cc91fa1971f" Dec 05 01:41:16 crc kubenswrapper[4665]: I1205 01:41:16.411741 4665 scope.go:117] "RemoveContainer" containerID="8cd223d0070a169fd916344eedf717a8e3360b1e4617802d0205ebe15919eade" Dec 05 01:41:16 crc kubenswrapper[4665]: E1205 01:41:16.413458 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8cd223d0070a169fd916344eedf717a8e3360b1e4617802d0205ebe15919eade\": container with ID starting with 8cd223d0070a169fd916344eedf717a8e3360b1e4617802d0205ebe15919eade not found: ID does not exist" containerID="8cd223d0070a169fd916344eedf717a8e3360b1e4617802d0205ebe15919eade" Dec 05 01:41:16 crc kubenswrapper[4665]: I1205 01:41:16.413500 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8cd223d0070a169fd916344eedf717a8e3360b1e4617802d0205ebe15919eade"} err="failed to get container status \"8cd223d0070a169fd916344eedf717a8e3360b1e4617802d0205ebe15919eade\": rpc error: code = NotFound desc = could not find container \"8cd223d0070a169fd916344eedf717a8e3360b1e4617802d0205ebe15919eade\": container with ID starting with 8cd223d0070a169fd916344eedf717a8e3360b1e4617802d0205ebe15919eade not found: ID does not exist" Dec 05 01:41:16 crc kubenswrapper[4665]: I1205 01:41:16.413527 4665 scope.go:117] "RemoveContainer" containerID="ae82a58bb8cb055a059422f7ff9b13a5d926e695260851d6bc725a01c5b66192" Dec 05 01:41:16 crc kubenswrapper[4665]: E1205 01:41:16.413994 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae82a58bb8cb055a059422f7ff9b13a5d926e695260851d6bc725a01c5b66192\": container with ID starting with ae82a58bb8cb055a059422f7ff9b13a5d926e695260851d6bc725a01c5b66192 not found: ID does not exist" containerID="ae82a58bb8cb055a059422f7ff9b13a5d926e695260851d6bc725a01c5b66192" Dec 05 01:41:16 crc kubenswrapper[4665]: I1205 01:41:16.414030 4665 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae82a58bb8cb055a059422f7ff9b13a5d926e695260851d6bc725a01c5b66192"} err="failed to get container status \"ae82a58bb8cb055a059422f7ff9b13a5d926e695260851d6bc725a01c5b66192\": rpc error: code = NotFound desc = could not find container \"ae82a58bb8cb055a059422f7ff9b13a5d926e695260851d6bc725a01c5b66192\": container with ID starting with ae82a58bb8cb055a059422f7ff9b13a5d926e695260851d6bc725a01c5b66192 not found: ID does not exist" Dec 05 01:41:16 crc kubenswrapper[4665]: I1205 01:41:16.414053 4665 scope.go:117] "RemoveContainer" containerID="de1c06f1c54d73b7ea3502b655b07c91217fa69bb6ea2e7803e09cc91fa1971f" Dec 05 01:41:16 crc kubenswrapper[4665]: E1205 01:41:16.414364 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"de1c06f1c54d73b7ea3502b655b07c91217fa69bb6ea2e7803e09cc91fa1971f\": container with ID starting with de1c06f1c54d73b7ea3502b655b07c91217fa69bb6ea2e7803e09cc91fa1971f not found: ID does not exist" containerID="de1c06f1c54d73b7ea3502b655b07c91217fa69bb6ea2e7803e09cc91fa1971f" Dec 05 01:41:16 crc kubenswrapper[4665]: I1205 01:41:16.414440 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"de1c06f1c54d73b7ea3502b655b07c91217fa69bb6ea2e7803e09cc91fa1971f"} err="failed to get container status \"de1c06f1c54d73b7ea3502b655b07c91217fa69bb6ea2e7803e09cc91fa1971f\": rpc error: code = NotFound desc = could not find container \"de1c06f1c54d73b7ea3502b655b07c91217fa69bb6ea2e7803e09cc91fa1971f\": container with ID starting with de1c06f1c54d73b7ea3502b655b07c91217fa69bb6ea2e7803e09cc91fa1971f not found: ID does not exist" Dec 05 01:41:16 crc kubenswrapper[4665]: I1205 01:41:16.905333 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e516cd1d-2aec-4eab-857d-635f8c4869cc" path="/var/lib/kubelet/pods/e516cd1d-2aec-4eab-857d-635f8c4869cc/volumes" Dec 05 01:41:19 crc kubenswrapper[4665]: I1205 01:41:19.832878 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-f8n8g" Dec 05 01:41:19 crc kubenswrapper[4665]: I1205 01:41:19.883227 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-f8n8g" Dec 05 01:41:20 crc kubenswrapper[4665]: I1205 01:41:20.409597 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f8n8g"] Dec 05 01:41:21 crc kubenswrapper[4665]: I1205 01:41:21.357457 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-f8n8g" podUID="84ffd979-df9b-4e65-9619-30d495ded451" containerName="registry-server" containerID="cri-o://d508f0ebb7fe7651f6f9f43005bcb83c84e30131f3ba8fcad9509314468ee633" gracePeriod=2 Dec 05 01:41:21 crc kubenswrapper[4665]: I1205 01:41:21.851688 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-f8n8g" Dec 05 01:41:22 crc kubenswrapper[4665]: I1205 01:41:22.029273 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84ffd979-df9b-4e65-9619-30d495ded451-catalog-content\") pod \"84ffd979-df9b-4e65-9619-30d495ded451\" (UID: \"84ffd979-df9b-4e65-9619-30d495ded451\") " Dec 05 01:41:22 crc kubenswrapper[4665]: I1205 01:41:22.029382 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84ffd979-df9b-4e65-9619-30d495ded451-utilities\") pod \"84ffd979-df9b-4e65-9619-30d495ded451\" (UID: \"84ffd979-df9b-4e65-9619-30d495ded451\") " Dec 05 01:41:22 crc kubenswrapper[4665]: I1205 01:41:22.029437 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gl2q2\" (UniqueName: \"kubernetes.io/projected/84ffd979-df9b-4e65-9619-30d495ded451-kube-api-access-gl2q2\") pod \"84ffd979-df9b-4e65-9619-30d495ded451\" (UID: \"84ffd979-df9b-4e65-9619-30d495ded451\") " Dec 05 01:41:22 crc kubenswrapper[4665]: I1205 01:41:22.030191 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84ffd979-df9b-4e65-9619-30d495ded451-utilities" (OuterVolumeSpecName: "utilities") pod "84ffd979-df9b-4e65-9619-30d495ded451" (UID: "84ffd979-df9b-4e65-9619-30d495ded451"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:41:22 crc kubenswrapper[4665]: I1205 01:41:22.036895 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/84ffd979-df9b-4e65-9619-30d495ded451-kube-api-access-gl2q2" (OuterVolumeSpecName: "kube-api-access-gl2q2") pod "84ffd979-df9b-4e65-9619-30d495ded451" (UID: "84ffd979-df9b-4e65-9619-30d495ded451"). InnerVolumeSpecName "kube-api-access-gl2q2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:41:22 crc kubenswrapper[4665]: I1205 01:41:22.078756 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/84ffd979-df9b-4e65-9619-30d495ded451-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "84ffd979-df9b-4e65-9619-30d495ded451" (UID: "84ffd979-df9b-4e65-9619-30d495ded451"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:41:22 crc kubenswrapper[4665]: I1205 01:41:22.131660 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/84ffd979-df9b-4e65-9619-30d495ded451-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 01:41:22 crc kubenswrapper[4665]: I1205 01:41:22.131688 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gl2q2\" (UniqueName: \"kubernetes.io/projected/84ffd979-df9b-4e65-9619-30d495ded451-kube-api-access-gl2q2\") on node \"crc\" DevicePath \"\"" Dec 05 01:41:22 crc kubenswrapper[4665]: I1205 01:41:22.131698 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/84ffd979-df9b-4e65-9619-30d495ded451-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 01:41:22 crc kubenswrapper[4665]: I1205 01:41:22.374079 4665 generic.go:334] "Generic (PLEG): container finished" podID="84ffd979-df9b-4e65-9619-30d495ded451" containerID="d508f0ebb7fe7651f6f9f43005bcb83c84e30131f3ba8fcad9509314468ee633" exitCode=0 Dec 05 01:41:22 crc kubenswrapper[4665]: I1205 01:41:22.374154 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f8n8g" event={"ID":"84ffd979-df9b-4e65-9619-30d495ded451","Type":"ContainerDied","Data":"d508f0ebb7fe7651f6f9f43005bcb83c84e30131f3ba8fcad9509314468ee633"} Dec 05 01:41:22 crc kubenswrapper[4665]: I1205 01:41:22.374279 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f8n8g" event={"ID":"84ffd979-df9b-4e65-9619-30d495ded451","Type":"ContainerDied","Data":"358eb1a09fbb3f6f65018d894dd93736cf62e391c2782c40f377750a3fdd6c26"} Dec 05 01:41:22 crc kubenswrapper[4665]: I1205 01:41:22.374348 4665 scope.go:117] "RemoveContainer" containerID="d508f0ebb7fe7651f6f9f43005bcb83c84e30131f3ba8fcad9509314468ee633" Dec 05 01:41:22 crc kubenswrapper[4665]: I1205 01:41:22.374605 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-f8n8g" Dec 05 01:41:22 crc kubenswrapper[4665]: I1205 01:41:22.414858 4665 scope.go:117] "RemoveContainer" containerID="7f765c4bfff100c6d4d6ee8d440a7f186e9be07cbc086a29e3907b0edb815d18" Dec 05 01:41:22 crc kubenswrapper[4665]: I1205 01:41:22.416073 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f8n8g"] Dec 05 01:41:22 crc kubenswrapper[4665]: I1205 01:41:22.427698 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-f8n8g"] Dec 05 01:41:22 crc kubenswrapper[4665]: I1205 01:41:22.436365 4665 scope.go:117] "RemoveContainer" containerID="ab3b6850dd6390b3d1e818e364f010f63228624fb291f420abf8199afa67da31" Dec 05 01:41:22 crc kubenswrapper[4665]: I1205 01:41:22.473637 4665 scope.go:117] "RemoveContainer" containerID="d508f0ebb7fe7651f6f9f43005bcb83c84e30131f3ba8fcad9509314468ee633" Dec 05 01:41:22 crc kubenswrapper[4665]: E1205 01:41:22.474248 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d508f0ebb7fe7651f6f9f43005bcb83c84e30131f3ba8fcad9509314468ee633\": container with ID starting with d508f0ebb7fe7651f6f9f43005bcb83c84e30131f3ba8fcad9509314468ee633 not found: ID does not exist" containerID="d508f0ebb7fe7651f6f9f43005bcb83c84e30131f3ba8fcad9509314468ee633" Dec 05 01:41:22 crc kubenswrapper[4665]: I1205 01:41:22.474328 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d508f0ebb7fe7651f6f9f43005bcb83c84e30131f3ba8fcad9509314468ee633"} err="failed to get container status \"d508f0ebb7fe7651f6f9f43005bcb83c84e30131f3ba8fcad9509314468ee633\": rpc error: code = NotFound desc = could not find container \"d508f0ebb7fe7651f6f9f43005bcb83c84e30131f3ba8fcad9509314468ee633\": container with ID starting with d508f0ebb7fe7651f6f9f43005bcb83c84e30131f3ba8fcad9509314468ee633 not found: ID does not exist" Dec 05 01:41:22 crc kubenswrapper[4665]: I1205 01:41:22.474358 4665 scope.go:117] "RemoveContainer" containerID="7f765c4bfff100c6d4d6ee8d440a7f186e9be07cbc086a29e3907b0edb815d18" Dec 05 01:41:22 crc kubenswrapper[4665]: E1205 01:41:22.474791 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f765c4bfff100c6d4d6ee8d440a7f186e9be07cbc086a29e3907b0edb815d18\": container with ID starting with 7f765c4bfff100c6d4d6ee8d440a7f186e9be07cbc086a29e3907b0edb815d18 not found: ID does not exist" containerID="7f765c4bfff100c6d4d6ee8d440a7f186e9be07cbc086a29e3907b0edb815d18" Dec 05 01:41:22 crc kubenswrapper[4665]: I1205 01:41:22.474829 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f765c4bfff100c6d4d6ee8d440a7f186e9be07cbc086a29e3907b0edb815d18"} err="failed to get container status \"7f765c4bfff100c6d4d6ee8d440a7f186e9be07cbc086a29e3907b0edb815d18\": rpc error: code = NotFound desc = could not find container \"7f765c4bfff100c6d4d6ee8d440a7f186e9be07cbc086a29e3907b0edb815d18\": container with ID starting with 7f765c4bfff100c6d4d6ee8d440a7f186e9be07cbc086a29e3907b0edb815d18 not found: ID does not exist" Dec 05 01:41:22 crc kubenswrapper[4665]: I1205 01:41:22.474859 4665 scope.go:117] "RemoveContainer" containerID="ab3b6850dd6390b3d1e818e364f010f63228624fb291f420abf8199afa67da31" Dec 05 01:41:22 crc kubenswrapper[4665]: E1205 01:41:22.475145 4665 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ab3b6850dd6390b3d1e818e364f010f63228624fb291f420abf8199afa67da31\": container with ID starting with ab3b6850dd6390b3d1e818e364f010f63228624fb291f420abf8199afa67da31 not found: ID does not exist" containerID="ab3b6850dd6390b3d1e818e364f010f63228624fb291f420abf8199afa67da31" Dec 05 01:41:22 crc kubenswrapper[4665]: I1205 01:41:22.475175 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab3b6850dd6390b3d1e818e364f010f63228624fb291f420abf8199afa67da31"} err="failed to get container status \"ab3b6850dd6390b3d1e818e364f010f63228624fb291f420abf8199afa67da31\": rpc error: code = NotFound desc = could not find container \"ab3b6850dd6390b3d1e818e364f010f63228624fb291f420abf8199afa67da31\": container with ID starting with ab3b6850dd6390b3d1e818e364f010f63228624fb291f420abf8199afa67da31 not found: ID does not exist" Dec 05 01:41:22 crc kubenswrapper[4665]: I1205 01:41:22.905082 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="84ffd979-df9b-4e65-9619-30d495ded451" path="/var/lib/kubelet/pods/84ffd979-df9b-4e65-9619-30d495ded451/volumes" Dec 05 01:41:26 crc kubenswrapper[4665]: I1205 01:41:26.054250 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-7056-account-create-update-g7mgf"] Dec 05 01:41:26 crc kubenswrapper[4665]: I1205 01:41:26.071108 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-7056-account-create-update-g7mgf"] Dec 05 01:41:26 crc kubenswrapper[4665]: I1205 01:41:26.905293 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99d012f6-fa77-4ff5-8dc6-7e1c48ec7365" path="/var/lib/kubelet/pods/99d012f6-fa77-4ff5-8dc6-7e1c48ec7365/volumes" Dec 05 01:41:29 crc kubenswrapper[4665]: I1205 01:41:29.893344 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:41:29 crc kubenswrapper[4665]: E1205 01:41:29.893821 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:41:30 crc kubenswrapper[4665]: I1205 01:41:30.035150 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-1f10-account-create-update-9c7nb"] Dec 05 01:41:30 crc kubenswrapper[4665]: I1205 01:41:30.052767 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-9x5x9"] Dec 05 01:41:30 crc kubenswrapper[4665]: I1205 01:41:30.066585 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-2l7ml"] Dec 05 01:41:30 crc kubenswrapper[4665]: I1205 01:41:30.076570 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-3a7f-account-create-update-2b647"] Dec 05 01:41:30 crc kubenswrapper[4665]: I1205 01:41:30.083926 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-9x5x9"] Dec 05 01:41:30 crc kubenswrapper[4665]: I1205 01:41:30.091457 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-slrj8"] Dec 05 01:41:30 crc kubenswrapper[4665]: I1205 01:41:30.098281 4665 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack/barbican-db-create-2l7ml"] Dec 05 01:41:30 crc kubenswrapper[4665]: I1205 01:41:30.105245 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-1f10-account-create-update-9c7nb"] Dec 05 01:41:30 crc kubenswrapper[4665]: I1205 01:41:30.112666 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-3a7f-account-create-update-2b647"] Dec 05 01:41:30 crc kubenswrapper[4665]: I1205 01:41:30.119646 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-slrj8"] Dec 05 01:41:30 crc kubenswrapper[4665]: I1205 01:41:30.905834 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="003173d7-ba20-4971-b03e-ba2fb5039ff7" path="/var/lib/kubelet/pods/003173d7-ba20-4971-b03e-ba2fb5039ff7/volumes" Dec 05 01:41:30 crc kubenswrapper[4665]: I1205 01:41:30.906743 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27d97449-bfcc-4c56-8b48-b2f604661b77" path="/var/lib/kubelet/pods/27d97449-bfcc-4c56-8b48-b2f604661b77/volumes" Dec 05 01:41:30 crc kubenswrapper[4665]: I1205 01:41:30.907551 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="52dcbd14-37d0-437d-882d-c05591461848" path="/var/lib/kubelet/pods/52dcbd14-37d0-437d-882d-c05591461848/volumes" Dec 05 01:41:30 crc kubenswrapper[4665]: I1205 01:41:30.908345 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c4310ab2-2a7e-4c02-a58c-f50d4d85882d" path="/var/lib/kubelet/pods/c4310ab2-2a7e-4c02-a58c-f50d4d85882d/volumes" Dec 05 01:41:30 crc kubenswrapper[4665]: I1205 01:41:30.909661 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff695797-ac78-42f0-9a60-96aa898b80f5" path="/var/lib/kubelet/pods/ff695797-ac78-42f0-9a60-96aa898b80f5/volumes" Dec 05 01:41:32 crc kubenswrapper[4665]: I1205 01:41:32.060215 4665 scope.go:117] "RemoveContainer" containerID="c6c8aa33a13c7d1d9c1ee4c5644b01481286d1bdb55267a2efe5c79f8e33ce5e" Dec 05 01:41:32 crc kubenswrapper[4665]: I1205 01:41:32.088984 4665 scope.go:117] "RemoveContainer" containerID="1cfe9fc27e6fd8079a155b696bf1d3926da2eb4e68f054b07d29e1e50a18c8e5" Dec 05 01:41:32 crc kubenswrapper[4665]: I1205 01:41:32.164947 4665 scope.go:117] "RemoveContainer" containerID="3bfe5c309623ea488f44ab0c9a2f228dffff2035ed5b5589c330cbbff2241f9d" Dec 05 01:41:32 crc kubenswrapper[4665]: I1205 01:41:32.208920 4665 scope.go:117] "RemoveContainer" containerID="9a047dd4d1adad72c9ea3845955449ec2eaf74cb9e85f74aa754a7424476eb67" Dec 05 01:41:32 crc kubenswrapper[4665]: I1205 01:41:32.253157 4665 scope.go:117] "RemoveContainer" containerID="a03593562853dc2ab8ebd888203016bf6dcba76b9925d09c412a7c386554450d" Dec 05 01:41:32 crc kubenswrapper[4665]: I1205 01:41:32.292371 4665 scope.go:117] "RemoveContainer" containerID="dad5949dfd12b600add8c3b8f40d5ec60a1540da6bc6dc017c526d1c163ee2c6" Dec 05 01:41:32 crc kubenswrapper[4665]: I1205 01:41:32.334113 4665 scope.go:117] "RemoveContainer" containerID="4ea20b7b337765275b07b14cefa3b6e620096f9dfff68a7da27cf673493105d3" Dec 05 01:41:32 crc kubenswrapper[4665]: I1205 01:41:32.353729 4665 scope.go:117] "RemoveContainer" containerID="f0423d69eaf37e2dc2e226b2cbf451099aaab4c25212917156baad5df253a8dc" Dec 05 01:41:32 crc kubenswrapper[4665]: I1205 01:41:32.372406 4665 scope.go:117] "RemoveContainer" containerID="6be3f8e25d1ee34087b84793d71b5c0d62eec4714cfcfcd770178ddedfc81719" Dec 05 01:41:32 crc kubenswrapper[4665]: I1205 01:41:32.409878 4665 scope.go:117] "RemoveContainer" 
containerID="22afa371b02fddf8b83bfd2c688c8d7e334603b2179d1bf856ccc6e404dab94d" Dec 05 01:41:32 crc kubenswrapper[4665]: I1205 01:41:32.436096 4665 scope.go:117] "RemoveContainer" containerID="b1a2fc5429305e2d61757fbc7f663c8981a73ba2ac8c1c95d2fce208b8282b60" Dec 05 01:41:32 crc kubenswrapper[4665]: I1205 01:41:32.459216 4665 scope.go:117] "RemoveContainer" containerID="6268d8dbe2a6c32963cc62d42188e3ae89f908889a5513789cde856509c9a0fd" Dec 05 01:41:37 crc kubenswrapper[4665]: I1205 01:41:37.038374 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-v2dln"] Dec 05 01:41:37 crc kubenswrapper[4665]: I1205 01:41:37.047945 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-dd2gb"] Dec 05 01:41:37 crc kubenswrapper[4665]: I1205 01:41:37.057814 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-v2dln"] Dec 05 01:41:37 crc kubenswrapper[4665]: I1205 01:41:37.069992 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-dd2gb"] Dec 05 01:41:38 crc kubenswrapper[4665]: I1205 01:41:38.547737 4665 generic.go:334] "Generic (PLEG): container finished" podID="dd675614-41e7-40e1-b09d-639e6ed250fb" containerID="f130a1188b0c3ebb7a785e8b362e242ca6f5ffce5fb9e3d8ebc41ad81112e508" exitCode=0 Dec 05 01:41:38 crc kubenswrapper[4665]: I1205 01:41:38.548032 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg" event={"ID":"dd675614-41e7-40e1-b09d-639e6ed250fb","Type":"ContainerDied","Data":"f130a1188b0c3ebb7a785e8b362e242ca6f5ffce5fb9e3d8ebc41ad81112e508"} Dec 05 01:41:38 crc kubenswrapper[4665]: I1205 01:41:38.904457 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25488b08-5c2b-47d5-a7c1-1c1609bf8dab" path="/var/lib/kubelet/pods/25488b08-5c2b-47d5-a7c1-1c1609bf8dab/volumes" Dec 05 01:41:38 crc kubenswrapper[4665]: I1205 01:41:38.905048 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="271549ee-1768-4a76-bbc9-d931689a6ad9" path="/var/lib/kubelet/pods/271549ee-1768-4a76-bbc9-d931689a6ad9/volumes" Dec 05 01:41:40 crc kubenswrapper[4665]: I1205 01:41:40.241916 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:41:40 crc kubenswrapper[4665]: E1205 01:41:40.245534 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:41:40 crc kubenswrapper[4665]: I1205 01:41:40.563840 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg" event={"ID":"dd675614-41e7-40e1-b09d-639e6ed250fb","Type":"ContainerDied","Data":"ce83604312b35436130fbe8a2813e031c349c234e93cf90edf99fd3584235eae"} Dec 05 01:41:40 crc kubenswrapper[4665]: I1205 01:41:40.564268 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ce83604312b35436130fbe8a2813e031c349c234e93cf90edf99fd3584235eae" Dec 05 01:41:40 crc kubenswrapper[4665]: I1205 01:41:40.573476 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg" Dec 05 01:41:40 crc kubenswrapper[4665]: I1205 01:41:40.702975 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dd675614-41e7-40e1-b09d-639e6ed250fb-ssh-key\") pod \"dd675614-41e7-40e1-b09d-639e6ed250fb\" (UID: \"dd675614-41e7-40e1-b09d-639e6ed250fb\") " Dec 05 01:41:40 crc kubenswrapper[4665]: I1205 01:41:40.703273 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd675614-41e7-40e1-b09d-639e6ed250fb-bootstrap-combined-ca-bundle\") pod \"dd675614-41e7-40e1-b09d-639e6ed250fb\" (UID: \"dd675614-41e7-40e1-b09d-639e6ed250fb\") " Dec 05 01:41:40 crc kubenswrapper[4665]: I1205 01:41:40.703512 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dd675614-41e7-40e1-b09d-639e6ed250fb-inventory\") pod \"dd675614-41e7-40e1-b09d-639e6ed250fb\" (UID: \"dd675614-41e7-40e1-b09d-639e6ed250fb\") " Dec 05 01:41:40 crc kubenswrapper[4665]: I1205 01:41:40.703699 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rfk6w\" (UniqueName: \"kubernetes.io/projected/dd675614-41e7-40e1-b09d-639e6ed250fb-kube-api-access-rfk6w\") pod \"dd675614-41e7-40e1-b09d-639e6ed250fb\" (UID: \"dd675614-41e7-40e1-b09d-639e6ed250fb\") " Dec 05 01:41:40 crc kubenswrapper[4665]: I1205 01:41:40.710655 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd675614-41e7-40e1-b09d-639e6ed250fb-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "dd675614-41e7-40e1-b09d-639e6ed250fb" (UID: "dd675614-41e7-40e1-b09d-639e6ed250fb"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:41:40 crc kubenswrapper[4665]: I1205 01:41:40.710836 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd675614-41e7-40e1-b09d-639e6ed250fb-kube-api-access-rfk6w" (OuterVolumeSpecName: "kube-api-access-rfk6w") pod "dd675614-41e7-40e1-b09d-639e6ed250fb" (UID: "dd675614-41e7-40e1-b09d-639e6ed250fb"). InnerVolumeSpecName "kube-api-access-rfk6w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:41:40 crc kubenswrapper[4665]: I1205 01:41:40.733078 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd675614-41e7-40e1-b09d-639e6ed250fb-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "dd675614-41e7-40e1-b09d-639e6ed250fb" (UID: "dd675614-41e7-40e1-b09d-639e6ed250fb"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:41:40 crc kubenswrapper[4665]: I1205 01:41:40.742972 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd675614-41e7-40e1-b09d-639e6ed250fb-inventory" (OuterVolumeSpecName: "inventory") pod "dd675614-41e7-40e1-b09d-639e6ed250fb" (UID: "dd675614-41e7-40e1-b09d-639e6ed250fb"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:41:40 crc kubenswrapper[4665]: I1205 01:41:40.806348 4665 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/dd675614-41e7-40e1-b09d-639e6ed250fb-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 01:41:40 crc kubenswrapper[4665]: I1205 01:41:40.806394 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rfk6w\" (UniqueName: \"kubernetes.io/projected/dd675614-41e7-40e1-b09d-639e6ed250fb-kube-api-access-rfk6w\") on node \"crc\" DevicePath \"\"" Dec 05 01:41:40 crc kubenswrapper[4665]: I1205 01:41:40.806414 4665 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/dd675614-41e7-40e1-b09d-639e6ed250fb-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 01:41:40 crc kubenswrapper[4665]: I1205 01:41:40.806427 4665 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dd675614-41e7-40e1-b09d-639e6ed250fb-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.572639 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.713481 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7mx69"] Dec 05 01:41:41 crc kubenswrapper[4665]: E1205 01:41:41.713948 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd675614-41e7-40e1-b09d-639e6ed250fb" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.713967 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd675614-41e7-40e1-b09d-639e6ed250fb" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 01:41:41 crc kubenswrapper[4665]: E1205 01:41:41.713990 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ffd979-df9b-4e65-9619-30d495ded451" containerName="extract-utilities" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.713997 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ffd979-df9b-4e65-9619-30d495ded451" containerName="extract-utilities" Dec 05 01:41:41 crc kubenswrapper[4665]: E1205 01:41:41.714011 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e516cd1d-2aec-4eab-857d-635f8c4869cc" containerName="registry-server" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.714017 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="e516cd1d-2aec-4eab-857d-635f8c4869cc" containerName="registry-server" Dec 05 01:41:41 crc kubenswrapper[4665]: E1205 01:41:41.714025 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8c9de4b-5255-4941-a97e-0458605df194" containerName="extract-content" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.714031 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8c9de4b-5255-4941-a97e-0458605df194" containerName="extract-content" Dec 05 01:41:41 crc kubenswrapper[4665]: E1205 01:41:41.714047 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8c9de4b-5255-4941-a97e-0458605df194" containerName="extract-utilities" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.714052 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8c9de4b-5255-4941-a97e-0458605df194" 
containerName="extract-utilities" Dec 05 01:41:41 crc kubenswrapper[4665]: E1205 01:41:41.714060 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ffd979-df9b-4e65-9619-30d495ded451" containerName="extract-content" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.714065 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ffd979-df9b-4e65-9619-30d495ded451" containerName="extract-content" Dec 05 01:41:41 crc kubenswrapper[4665]: E1205 01:41:41.714080 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d8c9de4b-5255-4941-a97e-0458605df194" containerName="registry-server" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.714086 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="d8c9de4b-5255-4941-a97e-0458605df194" containerName="registry-server" Dec 05 01:41:41 crc kubenswrapper[4665]: E1205 01:41:41.714097 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="84ffd979-df9b-4e65-9619-30d495ded451" containerName="registry-server" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.714103 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="84ffd979-df9b-4e65-9619-30d495ded451" containerName="registry-server" Dec 05 01:41:41 crc kubenswrapper[4665]: E1205 01:41:41.714113 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e516cd1d-2aec-4eab-857d-635f8c4869cc" containerName="extract-content" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.714121 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="e516cd1d-2aec-4eab-857d-635f8c4869cc" containerName="extract-content" Dec 05 01:41:41 crc kubenswrapper[4665]: E1205 01:41:41.714131 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e516cd1d-2aec-4eab-857d-635f8c4869cc" containerName="extract-utilities" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.714137 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="e516cd1d-2aec-4eab-857d-635f8c4869cc" containerName="extract-utilities" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.714341 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd675614-41e7-40e1-b09d-639e6ed250fb" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.714357 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="84ffd979-df9b-4e65-9619-30d495ded451" containerName="registry-server" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.714367 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="d8c9de4b-5255-4941-a97e-0458605df194" containerName="registry-server" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.714377 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="e516cd1d-2aec-4eab-857d-635f8c4869cc" containerName="registry-server" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.715026 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7mx69" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.719423 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.719530 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.719657 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.728113 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7mx69"] Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.732483 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r7jw8" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.829356 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a185d71d-c81e-4faf-8c7a-c31c2ee82f31-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7mx69\" (UID: \"a185d71d-c81e-4faf-8c7a-c31c2ee82f31\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7mx69" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.829445 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bj2k9\" (UniqueName: \"kubernetes.io/projected/a185d71d-c81e-4faf-8c7a-c31c2ee82f31-kube-api-access-bj2k9\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7mx69\" (UID: \"a185d71d-c81e-4faf-8c7a-c31c2ee82f31\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7mx69" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.829519 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a185d71d-c81e-4faf-8c7a-c31c2ee82f31-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7mx69\" (UID: \"a185d71d-c81e-4faf-8c7a-c31c2ee82f31\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7mx69" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.931503 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a185d71d-c81e-4faf-8c7a-c31c2ee82f31-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7mx69\" (UID: \"a185d71d-c81e-4faf-8c7a-c31c2ee82f31\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7mx69" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.931557 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bj2k9\" (UniqueName: \"kubernetes.io/projected/a185d71d-c81e-4faf-8c7a-c31c2ee82f31-kube-api-access-bj2k9\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7mx69\" (UID: \"a185d71d-c81e-4faf-8c7a-c31c2ee82f31\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7mx69" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.931579 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a185d71d-c81e-4faf-8c7a-c31c2ee82f31-inventory\") pod 
\"download-cache-edpm-deployment-openstack-edpm-ipam-7mx69\" (UID: \"a185d71d-c81e-4faf-8c7a-c31c2ee82f31\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7mx69" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.937150 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a185d71d-c81e-4faf-8c7a-c31c2ee82f31-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7mx69\" (UID: \"a185d71d-c81e-4faf-8c7a-c31c2ee82f31\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7mx69" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.944854 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a185d71d-c81e-4faf-8c7a-c31c2ee82f31-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7mx69\" (UID: \"a185d71d-c81e-4faf-8c7a-c31c2ee82f31\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7mx69" Dec 05 01:41:41 crc kubenswrapper[4665]: I1205 01:41:41.949859 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bj2k9\" (UniqueName: \"kubernetes.io/projected/a185d71d-c81e-4faf-8c7a-c31c2ee82f31-kube-api-access-bj2k9\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7mx69\" (UID: \"a185d71d-c81e-4faf-8c7a-c31c2ee82f31\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7mx69" Dec 05 01:41:42 crc kubenswrapper[4665]: I1205 01:41:42.056744 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7mx69" Dec 05 01:41:42 crc kubenswrapper[4665]: I1205 01:41:42.628392 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7mx69"] Dec 05 01:41:43 crc kubenswrapper[4665]: I1205 01:41:43.591760 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7mx69" event={"ID":"a185d71d-c81e-4faf-8c7a-c31c2ee82f31","Type":"ContainerStarted","Data":"c9af1ae92c269d878ea070a22011833c3028f17a67a8ac8a8b7b45e7a3b1300d"} Dec 05 01:41:43 crc kubenswrapper[4665]: I1205 01:41:43.592380 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7mx69" event={"ID":"a185d71d-c81e-4faf-8c7a-c31c2ee82f31","Type":"ContainerStarted","Data":"242fa8d829926fc93ddd8c6ee748ed73bbff703987f4bef39d4749b2161521fe"} Dec 05 01:41:43 crc kubenswrapper[4665]: I1205 01:41:43.613643 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7mx69" podStartSLOduration=2.417721113 podStartE2EDuration="2.613622524s" podCreationTimestamp="2025-12-05 01:41:41 +0000 UTC" firstStartedPulling="2025-12-05 01:41:42.636282131 +0000 UTC m=+1877.975674430" lastFinishedPulling="2025-12-05 01:41:42.832183542 +0000 UTC m=+1878.171575841" observedRunningTime="2025-12-05 01:41:43.611831691 +0000 UTC m=+1878.951223990" watchObservedRunningTime="2025-12-05 01:41:43.613622524 +0000 UTC m=+1878.953014823" Dec 05 01:41:52 crc kubenswrapper[4665]: I1205 01:41:52.894369 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:41:52 crc kubenswrapper[4665]: E1205 01:41:52.895229 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" 
for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:42:07 crc kubenswrapper[4665]: I1205 01:42:07.893260 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:42:07 crc kubenswrapper[4665]: E1205 01:42:07.894117 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:42:19 crc kubenswrapper[4665]: I1205 01:42:19.039439 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-v6sj2"] Dec 05 01:42:19 crc kubenswrapper[4665]: I1205 01:42:19.048310 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-v6sj2"] Dec 05 01:42:20 crc kubenswrapper[4665]: I1205 01:42:20.894506 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:42:20 crc kubenswrapper[4665]: E1205 01:42:20.895119 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:42:20 crc kubenswrapper[4665]: I1205 01:42:20.905334 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a032ea63-dc16-4378-b848-9a4f1274f860" path="/var/lib/kubelet/pods/a032ea63-dc16-4378-b848-9a4f1274f860/volumes" Dec 05 01:42:31 crc kubenswrapper[4665]: I1205 01:42:31.893350 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:42:31 crc kubenswrapper[4665]: E1205 01:42:31.894013 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:42:32 crc kubenswrapper[4665]: I1205 01:42:32.028566 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-zhgm4"] Dec 05 01:42:32 crc kubenswrapper[4665]: I1205 01:42:32.036891 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-zhgm4"] Dec 05 01:42:32 crc kubenswrapper[4665]: I1205 01:42:32.730173 4665 scope.go:117] "RemoveContainer" containerID="622e5e533523abb21b92b9f2a80cae4cce61914fe581467d07841e59ba2b7bc7" Dec 05 01:42:32 crc kubenswrapper[4665]: I1205 01:42:32.765234 4665 scope.go:117] "RemoveContainer" 
containerID="1dff241f5ff1e4ad5c49917b0ce39d1d31a24a35e0777c4d7a9f4307f3bb9b39" Dec 05 01:42:32 crc kubenswrapper[4665]: I1205 01:42:32.803328 4665 scope.go:117] "RemoveContainer" containerID="84a93caaad018815bec32398a8c470b321fb95d76ff964205b4b18113dee3dcc" Dec 05 01:42:32 crc kubenswrapper[4665]: I1205 01:42:32.904065 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de47bb8c-ea83-4f9a-be28-5716b59d25ed" path="/var/lib/kubelet/pods/de47bb8c-ea83-4f9a-be28-5716b59d25ed/volumes" Dec 05 01:42:42 crc kubenswrapper[4665]: I1205 01:42:42.032481 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-xkf6f"] Dec 05 01:42:42 crc kubenswrapper[4665]: I1205 01:42:42.042401 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-xkf6f"] Dec 05 01:42:42 crc kubenswrapper[4665]: I1205 01:42:42.903152 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="329fcb4a-b83f-4831-989a-584868907b9c" path="/var/lib/kubelet/pods/329fcb4a-b83f-4831-989a-584868907b9c/volumes" Dec 05 01:42:45 crc kubenswrapper[4665]: I1205 01:42:45.894140 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:42:45 crc kubenswrapper[4665]: E1205 01:42:45.895041 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:42:53 crc kubenswrapper[4665]: I1205 01:42:53.040091 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-267l7"] Dec 05 01:42:53 crc kubenswrapper[4665]: I1205 01:42:53.048853 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-267l7"] Dec 05 01:42:54 crc kubenswrapper[4665]: I1205 01:42:54.041325 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-rkvs6"] Dec 05 01:42:54 crc kubenswrapper[4665]: I1205 01:42:54.051098 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-rkvs6"] Dec 05 01:42:54 crc kubenswrapper[4665]: I1205 01:42:54.913016 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="72b6fbf7-1bb1-45c3-97a1-61da90bd1a92" path="/var/lib/kubelet/pods/72b6fbf7-1bb1-45c3-97a1-61da90bd1a92/volumes" Dec 05 01:42:54 crc kubenswrapper[4665]: I1205 01:42:54.914679 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2a58335-982b-42ff-933c-f93d38fbb197" path="/var/lib/kubelet/pods/f2a58335-982b-42ff-933c-f93d38fbb197/volumes" Dec 05 01:42:57 crc kubenswrapper[4665]: I1205 01:42:57.893595 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:42:57 crc kubenswrapper[4665]: E1205 01:42:57.894579 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 
05 01:43:11 crc kubenswrapper[4665]: I1205 01:43:11.910348 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:43:11 crc kubenswrapper[4665]: E1205 01:43:11.911310 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:43:24 crc kubenswrapper[4665]: I1205 01:43:24.929782 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:43:25 crc kubenswrapper[4665]: I1205 01:43:25.524200 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerStarted","Data":"5c570a9be3cb99220cfff24bbdba7c2a172ecd2885266f83ae0d281c3e6b2ef1"} Dec 05 01:43:29 crc kubenswrapper[4665]: I1205 01:43:29.555894 4665 generic.go:334] "Generic (PLEG): container finished" podID="a185d71d-c81e-4faf-8c7a-c31c2ee82f31" containerID="c9af1ae92c269d878ea070a22011833c3028f17a67a8ac8a8b7b45e7a3b1300d" exitCode=0 Dec 05 01:43:29 crc kubenswrapper[4665]: I1205 01:43:29.555992 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7mx69" event={"ID":"a185d71d-c81e-4faf-8c7a-c31c2ee82f31","Type":"ContainerDied","Data":"c9af1ae92c269d878ea070a22011833c3028f17a67a8ac8a8b7b45e7a3b1300d"} Dec 05 01:43:30 crc kubenswrapper[4665]: I1205 01:43:30.920180 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7mx69" Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.078999 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a185d71d-c81e-4faf-8c7a-c31c2ee82f31-ssh-key\") pod \"a185d71d-c81e-4faf-8c7a-c31c2ee82f31\" (UID: \"a185d71d-c81e-4faf-8c7a-c31c2ee82f31\") " Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.079315 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a185d71d-c81e-4faf-8c7a-c31c2ee82f31-inventory\") pod \"a185d71d-c81e-4faf-8c7a-c31c2ee82f31\" (UID: \"a185d71d-c81e-4faf-8c7a-c31c2ee82f31\") " Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.079401 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bj2k9\" (UniqueName: \"kubernetes.io/projected/a185d71d-c81e-4faf-8c7a-c31c2ee82f31-kube-api-access-bj2k9\") pod \"a185d71d-c81e-4faf-8c7a-c31c2ee82f31\" (UID: \"a185d71d-c81e-4faf-8c7a-c31c2ee82f31\") " Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.085515 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a185d71d-c81e-4faf-8c7a-c31c2ee82f31-kube-api-access-bj2k9" (OuterVolumeSpecName: "kube-api-access-bj2k9") pod "a185d71d-c81e-4faf-8c7a-c31c2ee82f31" (UID: "a185d71d-c81e-4faf-8c7a-c31c2ee82f31"). InnerVolumeSpecName "kube-api-access-bj2k9". 
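
[annotation] The machine-config-daemon entries above repeat "back-off 5m0s restarting failed container" on every sync until 01:43:24, when the container is finally restarted. Kubelet's crash-loop backoff doubles the restart delay up to a cap; the 5m cap is visible in the log, while the 10s base below is an assumption (the common kubelet default), so treat this as a sketch of the shape, not the exact schedule:

package main

import (
	"fmt"
	"time"
)

// nextBackoff doubles the previous delay and clamps it to maxDelay: the shape
// of the crash-loop backoff behind the "back-off 5m0s" messages above.
func nextBackoff(prev, maxDelay time.Duration) time.Duration {
	if next := prev * 2; next < maxDelay {
		return next
	}
	return maxDelay
}

func main() {
	const maxDelay = 5 * time.Minute // the cap visible in the log
	d := 10 * time.Second            // assumed base delay, not taken from the log
	for i := 1; i <= 7; i++ {
		fmt.Printf("restart %d: wait %v before next attempt\n", i, d)
		d = nextBackoff(d, maxDelay)
	}
}
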
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.113996 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a185d71d-c81e-4faf-8c7a-c31c2ee82f31-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a185d71d-c81e-4faf-8c7a-c31c2ee82f31" (UID: "a185d71d-c81e-4faf-8c7a-c31c2ee82f31"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.117010 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a185d71d-c81e-4faf-8c7a-c31c2ee82f31-inventory" (OuterVolumeSpecName: "inventory") pod "a185d71d-c81e-4faf-8c7a-c31c2ee82f31" (UID: "a185d71d-c81e-4faf-8c7a-c31c2ee82f31"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.181667 4665 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/a185d71d-c81e-4faf-8c7a-c31c2ee82f31-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.181694 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bj2k9\" (UniqueName: \"kubernetes.io/projected/a185d71d-c81e-4faf-8c7a-c31c2ee82f31-kube-api-access-bj2k9\") on node \"crc\" DevicePath \"\"" Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.181704 4665 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a185d71d-c81e-4faf-8c7a-c31c2ee82f31-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.575925 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7mx69" event={"ID":"a185d71d-c81e-4faf-8c7a-c31c2ee82f31","Type":"ContainerDied","Data":"242fa8d829926fc93ddd8c6ee748ed73bbff703987f4bef39d4749b2161521fe"} Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.575978 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="242fa8d829926fc93ddd8c6ee748ed73bbff703987f4bef39d4749b2161521fe" Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.576053 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7mx69" Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.667788 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr"] Dec 05 01:43:31 crc kubenswrapper[4665]: E1205 01:43:31.668268 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a185d71d-c81e-4faf-8c7a-c31c2ee82f31" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.668315 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="a185d71d-c81e-4faf-8c7a-c31c2ee82f31" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.668643 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="a185d71d-c81e-4faf-8c7a-c31c2ee82f31" containerName="download-cache-edpm-deployment-openstack-edpm-ipam" Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.669433 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr" Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.678164 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.678243 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.678416 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r7jw8" Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.678635 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.682636 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr"] Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.802762 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x8j6q\" (UniqueName: \"kubernetes.io/projected/16df9956-9395-4412-92c6-9635bf23c681-kube-api-access-x8j6q\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr\" (UID: \"16df9956-9395-4412-92c6-9635bf23c681\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr" Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.802806 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16df9956-9395-4412-92c6-9635bf23c681-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr\" (UID: \"16df9956-9395-4412-92c6-9635bf23c681\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr" Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.802961 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16df9956-9395-4412-92c6-9635bf23c681-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr\" (UID: \"16df9956-9395-4412-92c6-9635bf23c681\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr" Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.905074 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16df9956-9395-4412-92c6-9635bf23c681-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr\" (UID: \"16df9956-9395-4412-92c6-9635bf23c681\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr" Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.905197 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x8j6q\" (UniqueName: \"kubernetes.io/projected/16df9956-9395-4412-92c6-9635bf23c681-kube-api-access-x8j6q\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr\" (UID: \"16df9956-9395-4412-92c6-9635bf23c681\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr" Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.905221 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16df9956-9395-4412-92c6-9635bf23c681-inventory\") 
pod \"configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr\" (UID: \"16df9956-9395-4412-92c6-9635bf23c681\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr" Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.908863 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16df9956-9395-4412-92c6-9635bf23c681-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr\" (UID: \"16df9956-9395-4412-92c6-9635bf23c681\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr" Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.908870 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16df9956-9395-4412-92c6-9635bf23c681-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr\" (UID: \"16df9956-9395-4412-92c6-9635bf23c681\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr" Dec 05 01:43:31 crc kubenswrapper[4665]: I1205 01:43:31.939055 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x8j6q\" (UniqueName: \"kubernetes.io/projected/16df9956-9395-4412-92c6-9635bf23c681-kube-api-access-x8j6q\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr\" (UID: \"16df9956-9395-4412-92c6-9635bf23c681\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr" Dec 05 01:43:32 crc kubenswrapper[4665]: I1205 01:43:32.012314 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr" Dec 05 01:43:32 crc kubenswrapper[4665]: I1205 01:43:32.336529 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr"] Dec 05 01:43:32 crc kubenswrapper[4665]: W1205 01:43:32.340504 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod16df9956_9395_4412_92c6_9635bf23c681.slice/crio-bc4570d9c15a02a0cbad6ed267aa7352c5aa0409cd8f1aa3542f1662da192048 WatchSource:0}: Error finding container bc4570d9c15a02a0cbad6ed267aa7352c5aa0409cd8f1aa3542f1662da192048: Status 404 returned error can't find the container with id bc4570d9c15a02a0cbad6ed267aa7352c5aa0409cd8f1aa3542f1662da192048 Dec 05 01:43:32 crc kubenswrapper[4665]: I1205 01:43:32.586430 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr" event={"ID":"16df9956-9395-4412-92c6-9635bf23c681","Type":"ContainerStarted","Data":"bc4570d9c15a02a0cbad6ed267aa7352c5aa0409cd8f1aa3542f1662da192048"} Dec 05 01:43:32 crc kubenswrapper[4665]: I1205 01:43:32.902800 4665 scope.go:117] "RemoveContainer" containerID="5fef24f589e5224b60fc5340c948e406a3d50e59590c069b578fed13877eabc9" Dec 05 01:43:32 crc kubenswrapper[4665]: I1205 01:43:32.967390 4665 scope.go:117] "RemoveContainer" containerID="4a837b7b6687971d541d6c711a9db225e2fb3d0353b364e895c3398c57d1df65" Dec 05 01:43:33 crc kubenswrapper[4665]: I1205 01:43:33.016318 4665 scope.go:117] "RemoveContainer" containerID="e50e77e5d3507d5185c3652341c8aa63f89eb40a660322833cb28f5ee4b2993e" Dec 05 01:43:33 crc kubenswrapper[4665]: I1205 01:43:33.054224 4665 scope.go:117] "RemoveContainer" containerID="082c94ad7f4d5cff3d790e9fecb6cc2e99265db8f114c713f8ebc5d992827ee8" Dec 05 01:43:33 crc 
kubenswrapper[4665]: I1205 01:43:33.598442 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr" event={"ID":"16df9956-9395-4412-92c6-9635bf23c681","Type":"ContainerStarted","Data":"d7d761fa1649f9caa250a8cb7e103b3562d6d355afca410efdf27f503e5647d4"} Dec 05 01:43:35 crc kubenswrapper[4665]: I1205 01:43:35.042360 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr" podStartSLOduration=3.847371351 podStartE2EDuration="4.042280317s" podCreationTimestamp="2025-12-05 01:43:31 +0000 UTC" firstStartedPulling="2025-12-05 01:43:32.342424022 +0000 UTC m=+1987.681816321" lastFinishedPulling="2025-12-05 01:43:32.537332988 +0000 UTC m=+1987.876725287" observedRunningTime="2025-12-05 01:43:33.61774087 +0000 UTC m=+1988.957133189" watchObservedRunningTime="2025-12-05 01:43:35.042280317 +0000 UTC m=+1990.381672606" Dec 05 01:43:35 crc kubenswrapper[4665]: I1205 01:43:35.045600 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-whn5c"] Dec 05 01:43:35 crc kubenswrapper[4665]: I1205 01:43:35.054013 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-whn5c"] Dec 05 01:43:35 crc kubenswrapper[4665]: I1205 01:43:35.064105 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-fc55-account-create-update-tlw7g"] Dec 05 01:43:35 crc kubenswrapper[4665]: I1205 01:43:35.073759 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-fc55-account-create-update-tlw7g"] Dec 05 01:43:36 crc kubenswrapper[4665]: I1205 01:43:36.033214 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-2blsh"] Dec 05 01:43:36 crc kubenswrapper[4665]: I1205 01:43:36.041701 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-977b-account-create-update-d9c7b"] Dec 05 01:43:36 crc kubenswrapper[4665]: I1205 01:43:36.049142 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-1541-account-create-update-s9fdb"] Dec 05 01:43:36 crc kubenswrapper[4665]: I1205 01:43:36.056983 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-1541-account-create-update-s9fdb"] Dec 05 01:43:36 crc kubenswrapper[4665]: I1205 01:43:36.064827 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-977b-account-create-update-d9c7b"] Dec 05 01:43:36 crc kubenswrapper[4665]: I1205 01:43:36.071824 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-hctnc"] Dec 05 01:43:36 crc kubenswrapper[4665]: I1205 01:43:36.082282 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-2blsh"] Dec 05 01:43:36 crc kubenswrapper[4665]: I1205 01:43:36.089419 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-hctnc"] Dec 05 01:43:36 crc kubenswrapper[4665]: I1205 01:43:36.905402 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="42f0aba2-52cb-4679-8fac-bdd74b0f9f82" path="/var/lib/kubelet/pods/42f0aba2-52cb-4679-8fac-bdd74b0f9f82/volumes" Dec 05 01:43:36 crc kubenswrapper[4665]: I1205 01:43:36.906388 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6623940a-3fe5-4bbc-a157-c88d2271609e" path="/var/lib/kubelet/pods/6623940a-3fe5-4bbc-a157-c88d2271609e/volumes" Dec 05 01:43:36 crc 
kubenswrapper[4665]: I1205 01:43:36.907197 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="785a409c-4cf0-4d32-a459-576b739f4b4b" path="/var/lib/kubelet/pods/785a409c-4cf0-4d32-a459-576b739f4b4b/volumes" Dec 05 01:43:36 crc kubenswrapper[4665]: I1205 01:43:36.908027 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d75b13ee-e8d3-4714-ad14-2b2c9cf993f9" path="/var/lib/kubelet/pods/d75b13ee-e8d3-4714-ad14-2b2c9cf993f9/volumes" Dec 05 01:43:36 crc kubenswrapper[4665]: I1205 01:43:36.909490 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de4c510d-7346-4bf0-8319-ed0473a10044" path="/var/lib/kubelet/pods/de4c510d-7346-4bf0-8319-ed0473a10044/volumes" Dec 05 01:43:36 crc kubenswrapper[4665]: I1205 01:43:36.910528 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e60d2002-ca42-44ac-ac85-cb8c412a601e" path="/var/lib/kubelet/pods/e60d2002-ca42-44ac-ac85-cb8c412a601e/volumes" Dec 05 01:44:08 crc kubenswrapper[4665]: I1205 01:44:08.048576 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-cclzq"] Dec 05 01:44:08 crc kubenswrapper[4665]: I1205 01:44:08.059430 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-cclzq"] Dec 05 01:44:08 crc kubenswrapper[4665]: I1205 01:44:08.906981 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ff4178e-958a-443a-9dc6-dc354c11fa89" path="/var/lib/kubelet/pods/4ff4178e-958a-443a-9dc6-dc354c11fa89/volumes" Dec 05 01:44:33 crc kubenswrapper[4665]: I1205 01:44:33.176857 4665 scope.go:117] "RemoveContainer" containerID="8ea7773851d03fe16dcd97c9aa0f4587137c3dda61fe5b51a5285a0b32bc37a2" Dec 05 01:44:33 crc kubenswrapper[4665]: I1205 01:44:33.200496 4665 scope.go:117] "RemoveContainer" containerID="c29d41fc6fc128c90014b25869450012b9718e0d40c2f09b76b54dc0855011bd" Dec 05 01:44:33 crc kubenswrapper[4665]: I1205 01:44:33.251811 4665 scope.go:117] "RemoveContainer" containerID="dcde7665c51229857c8d5871a4e63adba3479fb5bb1d317b9d296239aa69cba0" Dec 05 01:44:33 crc kubenswrapper[4665]: I1205 01:44:33.299280 4665 scope.go:117] "RemoveContainer" containerID="1f9eae47a55e4fec4525aadfa8eef6df356e430cea52a1d825d7b6906be3944d" Dec 05 01:44:33 crc kubenswrapper[4665]: I1205 01:44:33.340117 4665 scope.go:117] "RemoveContainer" containerID="79328700a66ab6a5b2884d3dfa0013bf7e95b9107d75067b5d862cbdda7a6587" Dec 05 01:44:33 crc kubenswrapper[4665]: I1205 01:44:33.382604 4665 scope.go:117] "RemoveContainer" containerID="9473e1fdb2fb5077ebf5ee7581dfe6695959730b26f42fb20affde5620abc60e" Dec 05 01:44:33 crc kubenswrapper[4665]: I1205 01:44:33.430546 4665 scope.go:117] "RemoveContainer" containerID="dc8a70a5ae1fa6e539e77b472bf1ff6e4c1530c436e82901e3c7ee664b8af2b1" Dec 05 01:44:49 crc kubenswrapper[4665]: I1205 01:44:49.289268 4665 generic.go:334] "Generic (PLEG): container finished" podID="16df9956-9395-4412-92c6-9635bf23c681" containerID="d7d761fa1649f9caa250a8cb7e103b3562d6d355afca410efdf27f503e5647d4" exitCode=0 Dec 05 01:44:49 crc kubenswrapper[4665]: I1205 01:44:49.289404 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr" event={"ID":"16df9956-9395-4412-92c6-9635bf23c681","Type":"ContainerDied","Data":"d7d761fa1649f9caa250a8cb7e103b3562d6d355afca410efdf27f503e5647d4"} Dec 05 01:44:50 crc kubenswrapper[4665]: I1205 01:44:50.684822 4665 util.go:48] "No ready sandbox for pod can be found. 
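
[annotation] The "Cleaned up orphaned pod volumes dir" entries above show the kubelet removing /var/lib/kubelet/pods/<uid>/volumes for pods the API server no longer knows about. A sketch of that scan, assuming the directory layout implied by the paths in the log; it runs against a throwaway temp dir, not a real node:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cleanupOrphans walks podsDir and removes the volumes dir of every pod UID
// not in the active set, like the kubelet_volumes.go lines above.
func cleanupOrphans(podsDir string, active map[string]bool) error {
	entries, err := os.ReadDir(podsDir)
	if err != nil {
		return err
	}
	for _, e := range entries {
		if !e.IsDir() || active[e.Name()] {
			continue
		}
		volDir := filepath.Join(podsDir, e.Name(), "volumes")
		if err := os.RemoveAll(volDir); err != nil {
			return err
		}
		fmt.Printf("Cleaned up orphaned pod volumes dir podUID=%q path=%q\n", e.Name(), volDir)
	}
	return nil
}

func main() {
	dir, _ := os.MkdirTemp("", "pods")
	defer os.RemoveAll(dir)
	_ = os.MkdirAll(filepath.Join(dir, "42f0aba2-52cb-4679-8fac-bdd74b0f9f82", "volumes"), 0o755)
	if err := cleanupOrphans(dir, map[string]bool{}); err != nil {
		fmt.Println("error:", err)
	}
}
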
Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr" Dec 05 01:44:50 crc kubenswrapper[4665]: I1205 01:44:50.741923 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16df9956-9395-4412-92c6-9635bf23c681-inventory\") pod \"16df9956-9395-4412-92c6-9635bf23c681\" (UID: \"16df9956-9395-4412-92c6-9635bf23c681\") " Dec 05 01:44:50 crc kubenswrapper[4665]: I1205 01:44:50.741972 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x8j6q\" (UniqueName: \"kubernetes.io/projected/16df9956-9395-4412-92c6-9635bf23c681-kube-api-access-x8j6q\") pod \"16df9956-9395-4412-92c6-9635bf23c681\" (UID: \"16df9956-9395-4412-92c6-9635bf23c681\") " Dec 05 01:44:50 crc kubenswrapper[4665]: I1205 01:44:50.742055 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16df9956-9395-4412-92c6-9635bf23c681-ssh-key\") pod \"16df9956-9395-4412-92c6-9635bf23c681\" (UID: \"16df9956-9395-4412-92c6-9635bf23c681\") " Dec 05 01:44:50 crc kubenswrapper[4665]: I1205 01:44:50.750101 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/16df9956-9395-4412-92c6-9635bf23c681-kube-api-access-x8j6q" (OuterVolumeSpecName: "kube-api-access-x8j6q") pod "16df9956-9395-4412-92c6-9635bf23c681" (UID: "16df9956-9395-4412-92c6-9635bf23c681"). InnerVolumeSpecName "kube-api-access-x8j6q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:44:50 crc kubenswrapper[4665]: I1205 01:44:50.775445 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16df9956-9395-4412-92c6-9635bf23c681-inventory" (OuterVolumeSpecName: "inventory") pod "16df9956-9395-4412-92c6-9635bf23c681" (UID: "16df9956-9395-4412-92c6-9635bf23c681"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:44:50 crc kubenswrapper[4665]: I1205 01:44:50.776008 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/16df9956-9395-4412-92c6-9635bf23c681-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "16df9956-9395-4412-92c6-9635bf23c681" (UID: "16df9956-9395-4412-92c6-9635bf23c681"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:44:50 crc kubenswrapper[4665]: I1205 01:44:50.843684 4665 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/16df9956-9395-4412-92c6-9635bf23c681-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 01:44:50 crc kubenswrapper[4665]: I1205 01:44:50.843730 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x8j6q\" (UniqueName: \"kubernetes.io/projected/16df9956-9395-4412-92c6-9635bf23c681-kube-api-access-x8j6q\") on node \"crc\" DevicePath \"\"" Dec 05 01:44:50 crc kubenswrapper[4665]: I1205 01:44:50.843743 4665 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/16df9956-9395-4412-92c6-9635bf23c681-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 01:44:51 crc kubenswrapper[4665]: I1205 01:44:51.307454 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr" event={"ID":"16df9956-9395-4412-92c6-9635bf23c681","Type":"ContainerDied","Data":"bc4570d9c15a02a0cbad6ed267aa7352c5aa0409cd8f1aa3542f1662da192048"} Dec 05 01:44:51 crc kubenswrapper[4665]: I1205 01:44:51.307496 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc4570d9c15a02a0cbad6ed267aa7352c5aa0409cd8f1aa3542f1662da192048" Dec 05 01:44:51 crc kubenswrapper[4665]: I1205 01:44:51.307499 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr" Dec 05 01:44:51 crc kubenswrapper[4665]: I1205 01:44:51.384743 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6"] Dec 05 01:44:51 crc kubenswrapper[4665]: E1205 01:44:51.385177 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="16df9956-9395-4412-92c6-9635bf23c681" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 01:44:51 crc kubenswrapper[4665]: I1205 01:44:51.385202 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="16df9956-9395-4412-92c6-9635bf23c681" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 01:44:51 crc kubenswrapper[4665]: I1205 01:44:51.385465 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="16df9956-9395-4412-92c6-9635bf23c681" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 01:44:51 crc kubenswrapper[4665]: I1205 01:44:51.386236 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6" Dec 05 01:44:51 crc kubenswrapper[4665]: I1205 01:44:51.391480 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 01:44:51 crc kubenswrapper[4665]: I1205 01:44:51.391605 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 01:44:51 crc kubenswrapper[4665]: I1205 01:44:51.391722 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r7jw8" Dec 05 01:44:51 crc kubenswrapper[4665]: I1205 01:44:51.396952 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6"] Dec 05 01:44:51 crc kubenswrapper[4665]: I1205 01:44:51.399683 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 01:44:51 crc kubenswrapper[4665]: I1205 01:44:51.455896 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nslzh\" (UniqueName: \"kubernetes.io/projected/61d15914-07c6-4782-b8e2-96ec816206fb-kube-api-access-nslzh\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6\" (UID: \"61d15914-07c6-4782-b8e2-96ec816206fb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6" Dec 05 01:44:51 crc kubenswrapper[4665]: I1205 01:44:51.456208 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61d15914-07c6-4782-b8e2-96ec816206fb-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6\" (UID: \"61d15914-07c6-4782-b8e2-96ec816206fb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6" Dec 05 01:44:51 crc kubenswrapper[4665]: I1205 01:44:51.456362 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61d15914-07c6-4782-b8e2-96ec816206fb-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6\" (UID: \"61d15914-07c6-4782-b8e2-96ec816206fb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6" Dec 05 01:44:51 crc kubenswrapper[4665]: I1205 01:44:51.558746 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61d15914-07c6-4782-b8e2-96ec816206fb-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6\" (UID: \"61d15914-07c6-4782-b8e2-96ec816206fb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6" Dec 05 01:44:51 crc kubenswrapper[4665]: I1205 01:44:51.559152 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61d15914-07c6-4782-b8e2-96ec816206fb-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6\" (UID: \"61d15914-07c6-4782-b8e2-96ec816206fb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6" Dec 05 01:44:51 crc kubenswrapper[4665]: I1205 01:44:51.559415 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nslzh\" (UniqueName: \"kubernetes.io/projected/61d15914-07c6-4782-b8e2-96ec816206fb-kube-api-access-nslzh\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6\" (UID: \"61d15914-07c6-4782-b8e2-96ec816206fb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6" Dec 05 01:44:51 crc kubenswrapper[4665]: I1205 01:44:51.564311 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61d15914-07c6-4782-b8e2-96ec816206fb-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6\" (UID: \"61d15914-07c6-4782-b8e2-96ec816206fb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6" Dec 05 01:44:51 crc kubenswrapper[4665]: I1205 01:44:51.568546 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61d15914-07c6-4782-b8e2-96ec816206fb-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6\" (UID: \"61d15914-07c6-4782-b8e2-96ec816206fb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6" Dec 05 01:44:51 crc kubenswrapper[4665]: I1205 01:44:51.576751 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nslzh\" (UniqueName: \"kubernetes.io/projected/61d15914-07c6-4782-b8e2-96ec816206fb-kube-api-access-nslzh\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6\" (UID: \"61d15914-07c6-4782-b8e2-96ec816206fb\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6" Dec 05 01:44:51 crc kubenswrapper[4665]: I1205 01:44:51.700798 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6" Dec 05 01:44:52 crc kubenswrapper[4665]: I1205 01:44:52.221891 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6"] Dec 05 01:44:52 crc kubenswrapper[4665]: W1205 01:44:52.227509 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod61d15914_07c6_4782_b8e2_96ec816206fb.slice/crio-6ef964fcb1d5c82af6a32da1984e3e9ae949d2b08f1ccfbc6d6da40cd5969aad WatchSource:0}: Error finding container 6ef964fcb1d5c82af6a32da1984e3e9ae949d2b08f1ccfbc6d6da40cd5969aad: Status 404 returned error can't find the container with id 6ef964fcb1d5c82af6a32da1984e3e9ae949d2b08f1ccfbc6d6da40cd5969aad Dec 05 01:44:52 crc kubenswrapper[4665]: I1205 01:44:52.316119 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6" event={"ID":"61d15914-07c6-4782-b8e2-96ec816206fb","Type":"ContainerStarted","Data":"6ef964fcb1d5c82af6a32da1984e3e9ae949d2b08f1ccfbc6d6da40cd5969aad"} Dec 05 01:44:53 crc kubenswrapper[4665]: I1205 01:44:53.323852 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6" event={"ID":"61d15914-07c6-4782-b8e2-96ec816206fb","Type":"ContainerStarted","Data":"f2c58dab5aba367fcb6f368057b3829388a9ea461d0f1eac9ccf0d6628b72477"} Dec 05 01:44:53 crc kubenswrapper[4665]: I1205 01:44:53.343595 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6" podStartSLOduration=2.162497702 podStartE2EDuration="2.343579651s" podCreationTimestamp="2025-12-05 01:44:51 +0000 UTC" firstStartedPulling="2025-12-05 01:44:52.2296684 +0000 UTC m=+2067.569060699" 
lastFinishedPulling="2025-12-05 01:44:52.410750349 +0000 UTC m=+2067.750142648" observedRunningTime="2025-12-05 01:44:53.340579238 +0000 UTC m=+2068.679971537" watchObservedRunningTime="2025-12-05 01:44:53.343579651 +0000 UTC m=+2068.682971950" Dec 05 01:44:58 crc kubenswrapper[4665]: I1205 01:44:58.381680 4665 generic.go:334] "Generic (PLEG): container finished" podID="61d15914-07c6-4782-b8e2-96ec816206fb" containerID="f2c58dab5aba367fcb6f368057b3829388a9ea461d0f1eac9ccf0d6628b72477" exitCode=0 Dec 05 01:44:58 crc kubenswrapper[4665]: I1205 01:44:58.381777 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6" event={"ID":"61d15914-07c6-4782-b8e2-96ec816206fb","Type":"ContainerDied","Data":"f2c58dab5aba367fcb6f368057b3829388a9ea461d0f1eac9ccf0d6628b72477"} Dec 05 01:44:59 crc kubenswrapper[4665]: I1205 01:44:59.972744 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.018081 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61d15914-07c6-4782-b8e2-96ec816206fb-inventory\") pod \"61d15914-07c6-4782-b8e2-96ec816206fb\" (UID: \"61d15914-07c6-4782-b8e2-96ec816206fb\") " Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.018261 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61d15914-07c6-4782-b8e2-96ec816206fb-ssh-key\") pod \"61d15914-07c6-4782-b8e2-96ec816206fb\" (UID: \"61d15914-07c6-4782-b8e2-96ec816206fb\") " Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.018448 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nslzh\" (UniqueName: \"kubernetes.io/projected/61d15914-07c6-4782-b8e2-96ec816206fb-kube-api-access-nslzh\") pod \"61d15914-07c6-4782-b8e2-96ec816206fb\" (UID: \"61d15914-07c6-4782-b8e2-96ec816206fb\") " Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.025537 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61d15914-07c6-4782-b8e2-96ec816206fb-kube-api-access-nslzh" (OuterVolumeSpecName: "kube-api-access-nslzh") pod "61d15914-07c6-4782-b8e2-96ec816206fb" (UID: "61d15914-07c6-4782-b8e2-96ec816206fb"). InnerVolumeSpecName "kube-api-access-nslzh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.047144 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61d15914-07c6-4782-b8e2-96ec816206fb-inventory" (OuterVolumeSpecName: "inventory") pod "61d15914-07c6-4782-b8e2-96ec816206fb" (UID: "61d15914-07c6-4782-b8e2-96ec816206fb"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.047623 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61d15914-07c6-4782-b8e2-96ec816206fb-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "61d15914-07c6-4782-b8e2-96ec816206fb" (UID: "61d15914-07c6-4782-b8e2-96ec816206fb"). InnerVolumeSpecName "ssh-key". 
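
[annotation] The pod_startup_latency_tracker entry above reports podStartE2EDuration=2.343579651s but podStartSLOduration=2.162497702s: the SLO figure excludes the image-pull window (lastFinishedPulling minus firstStartedPulling). Reproducing that arithmetic from the monotonic m=+ offsets copied out of the entry shows the numbers line up (modulo float64 rounding):

package main

import (
	"fmt"
	"time"
)

func main() {
	// Monotonic offsets (m=+...) copied from the log entry above.
	firstStartedPulling := 2067.569060699
	lastFinishedPulling := 2067.750142648
	e2e := 2.343579651 // podStartE2EDuration in seconds

	pull := lastFinishedPulling - firstStartedPulling
	slo := e2e - pull
	fmt.Printf("image pull window: %v\n", time.Duration(pull*float64(time.Second)))
	fmt.Printf("podStartSLOduration: %.9fs (log says 2.162497702s)\n", slo)
}
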
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.122511 4665 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/61d15914-07c6-4782-b8e2-96ec816206fb-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.122851 4665 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61d15914-07c6-4782-b8e2-96ec816206fb-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.122871 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nslzh\" (UniqueName: \"kubernetes.io/projected/61d15914-07c6-4782-b8e2-96ec816206fb-kube-api-access-nslzh\") on node \"crc\" DevicePath \"\"" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.138071 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414985-t5stm"] Dec 05 01:45:00 crc kubenswrapper[4665]: E1205 01:45:00.138533 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61d15914-07c6-4782-b8e2-96ec816206fb" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.138560 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="61d15914-07c6-4782-b8e2-96ec816206fb" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.138860 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="61d15914-07c6-4782-b8e2-96ec816206fb" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.139767 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414985-t5stm" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.142181 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.142571 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.154813 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414985-t5stm"] Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.224038 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8ef28b0f-5dfc-45d0-8c97-d7b77e103a06-secret-volume\") pod \"collect-profiles-29414985-t5stm\" (UID: \"8ef28b0f-5dfc-45d0-8c97-d7b77e103a06\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414985-t5stm" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.224111 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcjq5\" (UniqueName: \"kubernetes.io/projected/8ef28b0f-5dfc-45d0-8c97-d7b77e103a06-kube-api-access-lcjq5\") pod \"collect-profiles-29414985-t5stm\" (UID: \"8ef28b0f-5dfc-45d0-8c97-d7b77e103a06\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414985-t5stm" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.224140 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8ef28b0f-5dfc-45d0-8c97-d7b77e103a06-config-volume\") pod \"collect-profiles-29414985-t5stm\" (UID: \"8ef28b0f-5dfc-45d0-8c97-d7b77e103a06\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414985-t5stm" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.326575 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8ef28b0f-5dfc-45d0-8c97-d7b77e103a06-secret-volume\") pod \"collect-profiles-29414985-t5stm\" (UID: \"8ef28b0f-5dfc-45d0-8c97-d7b77e103a06\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414985-t5stm" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.326885 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcjq5\" (UniqueName: \"kubernetes.io/projected/8ef28b0f-5dfc-45d0-8c97-d7b77e103a06-kube-api-access-lcjq5\") pod \"collect-profiles-29414985-t5stm\" (UID: \"8ef28b0f-5dfc-45d0-8c97-d7b77e103a06\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414985-t5stm" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.327015 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8ef28b0f-5dfc-45d0-8c97-d7b77e103a06-config-volume\") pod \"collect-profiles-29414985-t5stm\" (UID: \"8ef28b0f-5dfc-45d0-8c97-d7b77e103a06\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414985-t5stm" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.327787 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8ef28b0f-5dfc-45d0-8c97-d7b77e103a06-config-volume\") pod 
\"collect-profiles-29414985-t5stm\" (UID: \"8ef28b0f-5dfc-45d0-8c97-d7b77e103a06\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414985-t5stm" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.339115 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8ef28b0f-5dfc-45d0-8c97-d7b77e103a06-secret-volume\") pod \"collect-profiles-29414985-t5stm\" (UID: \"8ef28b0f-5dfc-45d0-8c97-d7b77e103a06\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414985-t5stm" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.347117 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcjq5\" (UniqueName: \"kubernetes.io/projected/8ef28b0f-5dfc-45d0-8c97-d7b77e103a06-kube-api-access-lcjq5\") pod \"collect-profiles-29414985-t5stm\" (UID: \"8ef28b0f-5dfc-45d0-8c97-d7b77e103a06\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29414985-t5stm" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.403601 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6" event={"ID":"61d15914-07c6-4782-b8e2-96ec816206fb","Type":"ContainerDied","Data":"6ef964fcb1d5c82af6a32da1984e3e9ae949d2b08f1ccfbc6d6da40cd5969aad"} Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.403654 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6ef964fcb1d5c82af6a32da1984e3e9ae949d2b08f1ccfbc6d6da40cd5969aad" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.403736 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.463045 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414985-t5stm" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.522377 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jtmp"] Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.523698 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jtmp" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.528567 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.528708 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.528816 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r7jw8" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.528961 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.544732 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jtmp"] Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.639429 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5jtmp\" (UID: \"3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jtmp" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.639826 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pr89p\" (UniqueName: \"kubernetes.io/projected/3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d-kube-api-access-pr89p\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5jtmp\" (UID: \"3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jtmp" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.639873 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5jtmp\" (UID: \"3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jtmp" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.741977 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5jtmp\" (UID: \"3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jtmp" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.742093 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pr89p\" (UniqueName: \"kubernetes.io/projected/3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d-kube-api-access-pr89p\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5jtmp\" (UID: \"3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jtmp" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.742135 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5jtmp\" (UID: 
\"3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jtmp" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.746449 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5jtmp\" (UID: \"3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jtmp" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.746798 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5jtmp\" (UID: \"3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jtmp" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.761767 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pr89p\" (UniqueName: \"kubernetes.io/projected/3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d-kube-api-access-pr89p\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-5jtmp\" (UID: \"3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jtmp" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.860083 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jtmp" Dec 05 01:45:00 crc kubenswrapper[4665]: I1205 01:45:00.985561 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414985-t5stm"] Dec 05 01:45:01 crc kubenswrapper[4665]: I1205 01:45:01.225022 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jtmp"] Dec 05 01:45:01 crc kubenswrapper[4665]: W1205 01:45:01.237305 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3966caf7_b7a8_4bd6_b4e3_e8ccbc5fce3d.slice/crio-62dcfcfdf338161c80fc8e61bad39d4a4d4e3184460260d0bb2e9da19074f639 WatchSource:0}: Error finding container 62dcfcfdf338161c80fc8e61bad39d4a4d4e3184460260d0bb2e9da19074f639: Status 404 returned error can't find the container with id 62dcfcfdf338161c80fc8e61bad39d4a4d4e3184460260d0bb2e9da19074f639 Dec 05 01:45:01 crc kubenswrapper[4665]: I1205 01:45:01.412685 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jtmp" event={"ID":"3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d","Type":"ContainerStarted","Data":"62dcfcfdf338161c80fc8e61bad39d4a4d4e3184460260d0bb2e9da19074f639"} Dec 05 01:45:01 crc kubenswrapper[4665]: I1205 01:45:01.414706 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414985-t5stm" event={"ID":"8ef28b0f-5dfc-45d0-8c97-d7b77e103a06","Type":"ContainerStarted","Data":"c2bae8613bdef9ba607b5fb048aec9de981ed974d20479fe9ab53be84b6ac8ce"} Dec 05 01:45:01 crc kubenswrapper[4665]: I1205 01:45:01.414756 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414985-t5stm" event={"ID":"8ef28b0f-5dfc-45d0-8c97-d7b77e103a06","Type":"ContainerStarted","Data":"77e157706c03e79d707d930a20b02c5972ab996a57a05cbf67ace4d740b19c5d"} Dec 05 
01:45:01 crc kubenswrapper[4665]: I1205 01:45:01.436918 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29414985-t5stm" podStartSLOduration=1.436903295 podStartE2EDuration="1.436903295s" podCreationTimestamp="2025-12-05 01:45:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 01:45:01.429191488 +0000 UTC m=+2076.768583807" watchObservedRunningTime="2025-12-05 01:45:01.436903295 +0000 UTC m=+2076.776295594" Dec 05 01:45:02 crc kubenswrapper[4665]: I1205 01:45:02.424686 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jtmp" event={"ID":"3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d","Type":"ContainerStarted","Data":"d141a85fd577c4ecf8b6b68ac2bf2d3f9d898268642f9d03ff47cba1cedeb814"} Dec 05 01:45:02 crc kubenswrapper[4665]: I1205 01:45:02.426311 4665 generic.go:334] "Generic (PLEG): container finished" podID="8ef28b0f-5dfc-45d0-8c97-d7b77e103a06" containerID="c2bae8613bdef9ba607b5fb048aec9de981ed974d20479fe9ab53be84b6ac8ce" exitCode=0 Dec 05 01:45:02 crc kubenswrapper[4665]: I1205 01:45:02.426345 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414985-t5stm" event={"ID":"8ef28b0f-5dfc-45d0-8c97-d7b77e103a06","Type":"ContainerDied","Data":"c2bae8613bdef9ba607b5fb048aec9de981ed974d20479fe9ab53be84b6ac8ce"} Dec 05 01:45:02 crc kubenswrapper[4665]: I1205 01:45:02.471461 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jtmp" podStartSLOduration=2.285972859 podStartE2EDuration="2.471442666s" podCreationTimestamp="2025-12-05 01:45:00 +0000 UTC" firstStartedPulling="2025-12-05 01:45:01.239528258 +0000 UTC m=+2076.578920547" lastFinishedPulling="2025-12-05 01:45:01.424998055 +0000 UTC m=+2076.764390354" observedRunningTime="2025-12-05 01:45:02.454902932 +0000 UTC m=+2077.794295251" watchObservedRunningTime="2025-12-05 01:45:02.471442666 +0000 UTC m=+2077.810834975" Dec 05 01:45:03 crc kubenswrapper[4665]: I1205 01:45:03.851262 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414985-t5stm" Dec 05 01:45:03 crc kubenswrapper[4665]: I1205 01:45:03.903645 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8ef28b0f-5dfc-45d0-8c97-d7b77e103a06-config-volume\") pod \"8ef28b0f-5dfc-45d0-8c97-d7b77e103a06\" (UID: \"8ef28b0f-5dfc-45d0-8c97-d7b77e103a06\") " Dec 05 01:45:03 crc kubenswrapper[4665]: I1205 01:45:03.903728 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8ef28b0f-5dfc-45d0-8c97-d7b77e103a06-secret-volume\") pod \"8ef28b0f-5dfc-45d0-8c97-d7b77e103a06\" (UID: \"8ef28b0f-5dfc-45d0-8c97-d7b77e103a06\") " Dec 05 01:45:03 crc kubenswrapper[4665]: I1205 01:45:03.903918 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lcjq5\" (UniqueName: \"kubernetes.io/projected/8ef28b0f-5dfc-45d0-8c97-d7b77e103a06-kube-api-access-lcjq5\") pod \"8ef28b0f-5dfc-45d0-8c97-d7b77e103a06\" (UID: \"8ef28b0f-5dfc-45d0-8c97-d7b77e103a06\") " Dec 05 01:45:03 crc kubenswrapper[4665]: I1205 01:45:03.904494 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8ef28b0f-5dfc-45d0-8c97-d7b77e103a06-config-volume" (OuterVolumeSpecName: "config-volume") pod "8ef28b0f-5dfc-45d0-8c97-d7b77e103a06" (UID: "8ef28b0f-5dfc-45d0-8c97-d7b77e103a06"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:45:03 crc kubenswrapper[4665]: I1205 01:45:03.910535 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ef28b0f-5dfc-45d0-8c97-d7b77e103a06-kube-api-access-lcjq5" (OuterVolumeSpecName: "kube-api-access-lcjq5") pod "8ef28b0f-5dfc-45d0-8c97-d7b77e103a06" (UID: "8ef28b0f-5dfc-45d0-8c97-d7b77e103a06"). InnerVolumeSpecName "kube-api-access-lcjq5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:45:03 crc kubenswrapper[4665]: I1205 01:45:03.913108 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ef28b0f-5dfc-45d0-8c97-d7b77e103a06-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "8ef28b0f-5dfc-45d0-8c97-d7b77e103a06" (UID: "8ef28b0f-5dfc-45d0-8c97-d7b77e103a06"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:45:04 crc kubenswrapper[4665]: I1205 01:45:04.006862 4665 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/8ef28b0f-5dfc-45d0-8c97-d7b77e103a06-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 01:45:04 crc kubenswrapper[4665]: I1205 01:45:04.006908 4665 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/8ef28b0f-5dfc-45d0-8c97-d7b77e103a06-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 01:45:04 crc kubenswrapper[4665]: I1205 01:45:04.006928 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lcjq5\" (UniqueName: \"kubernetes.io/projected/8ef28b0f-5dfc-45d0-8c97-d7b77e103a06-kube-api-access-lcjq5\") on node \"crc\" DevicePath \"\"" Dec 05 01:45:04 crc kubenswrapper[4665]: I1205 01:45:04.444193 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29414985-t5stm" event={"ID":"8ef28b0f-5dfc-45d0-8c97-d7b77e103a06","Type":"ContainerDied","Data":"77e157706c03e79d707d930a20b02c5972ab996a57a05cbf67ace4d740b19c5d"} Dec 05 01:45:04 crc kubenswrapper[4665]: I1205 01:45:04.444229 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="77e157706c03e79d707d930a20b02c5972ab996a57a05cbf67ace4d740b19c5d" Dec 05 01:45:04 crc kubenswrapper[4665]: I1205 01:45:04.444523 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29414985-t5stm" Dec 05 01:45:04 crc kubenswrapper[4665]: I1205 01:45:04.549855 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414940-rn5fx"] Dec 05 01:45:04 crc kubenswrapper[4665]: I1205 01:45:04.557638 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414940-rn5fx"] Dec 05 01:45:04 crc kubenswrapper[4665]: I1205 01:45:04.906592 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ebede5a-b1cb-4059-b158-ce9a85aed080" path="/var/lib/kubelet/pods/4ebede5a-b1cb-4059-b158-ce9a85aed080/volumes" Dec 05 01:45:12 crc kubenswrapper[4665]: I1205 01:45:12.084727 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-tdhhv"] Dec 05 01:45:12 crc kubenswrapper[4665]: I1205 01:45:12.097526 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-tdhhv"] Dec 05 01:45:12 crc kubenswrapper[4665]: I1205 01:45:12.904647 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="56a40c18-4bbe-4897-96ab-532ffb565321" path="/var/lib/kubelet/pods/56a40c18-4bbe-4897-96ab-532ffb565321/volumes" Dec 05 01:45:16 crc kubenswrapper[4665]: I1205 01:45:16.033535 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-lqq2r"] Dec 05 01:45:16 crc kubenswrapper[4665]: I1205 01:45:16.042538 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-lqq2r"] Dec 05 01:45:16 crc kubenswrapper[4665]: I1205 01:45:16.903913 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cffe2874-1254-4832-943d-59419d486859" path="/var/lib/kubelet/pods/cffe2874-1254-4832-943d-59419d486859/volumes" Dec 05 01:45:33 crc kubenswrapper[4665]: I1205 01:45:33.593079 4665 scope.go:117] "RemoveContainer" 
containerID="dffed07b73eb3577e3c4ba61d5e18d3b7b2328bda7e18e4fae8d6773e88002ad" Dec 05 01:45:33 crc kubenswrapper[4665]: I1205 01:45:33.634813 4665 scope.go:117] "RemoveContainer" containerID="d32d4c60f5a286b86e07ae60fbe95e3d9a21a73670e77f52117a1dc477781641" Dec 05 01:45:33 crc kubenswrapper[4665]: I1205 01:45:33.693118 4665 scope.go:117] "RemoveContainer" containerID="b60b82c009802106c1d1611ec6a894a7ac3c90c5fd92b97fad08d3a5a93f0cfa" Dec 05 01:45:44 crc kubenswrapper[4665]: I1205 01:45:44.922425 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:45:44 crc kubenswrapper[4665]: I1205 01:45:44.922936 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:45:45 crc kubenswrapper[4665]: I1205 01:45:45.788524 4665 generic.go:334] "Generic (PLEG): container finished" podID="3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d" containerID="d141a85fd577c4ecf8b6b68ac2bf2d3f9d898268642f9d03ff47cba1cedeb814" exitCode=0 Dec 05 01:45:45 crc kubenswrapper[4665]: I1205 01:45:45.788572 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jtmp" event={"ID":"3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d","Type":"ContainerDied","Data":"d141a85fd577c4ecf8b6b68ac2bf2d3f9d898268642f9d03ff47cba1cedeb814"} Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.203063 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jtmp" Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.342430 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d-inventory\") pod \"3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d\" (UID: \"3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d\") " Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.342795 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pr89p\" (UniqueName: \"kubernetes.io/projected/3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d-kube-api-access-pr89p\") pod \"3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d\" (UID: \"3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d\") " Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.342843 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d-ssh-key\") pod \"3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d\" (UID: \"3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d\") " Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.348840 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d-kube-api-access-pr89p" (OuterVolumeSpecName: "kube-api-access-pr89p") pod "3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d" (UID: "3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d"). InnerVolumeSpecName "kube-api-access-pr89p". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.372575 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d" (UID: "3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.372908 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d-inventory" (OuterVolumeSpecName: "inventory") pod "3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d" (UID: "3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.445563 4665 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.445596 4665 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.445606 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pr89p\" (UniqueName: \"kubernetes.io/projected/3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d-kube-api-access-pr89p\") on node \"crc\" DevicePath \"\"" Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.805406 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jtmp" event={"ID":"3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d","Type":"ContainerDied","Data":"62dcfcfdf338161c80fc8e61bad39d4a4d4e3184460260d0bb2e9da19074f639"} Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.805447 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="62dcfcfdf338161c80fc8e61bad39d4a4d4e3184460260d0bb2e9da19074f639" Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.805446 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-5jtmp" Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.897420 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8"] Dec 05 01:45:47 crc kubenswrapper[4665]: E1205 01:45:47.897814 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.897831 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 01:45:47 crc kubenswrapper[4665]: E1205 01:45:47.897869 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ef28b0f-5dfc-45d0-8c97-d7b77e103a06" containerName="collect-profiles" Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.897875 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ef28b0f-5dfc-45d0-8c97-d7b77e103a06" containerName="collect-profiles" Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.898048 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.898075 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ef28b0f-5dfc-45d0-8c97-d7b77e103a06" containerName="collect-profiles" Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.898697 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8" Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.902705 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.902972 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.903504 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r7jw8" Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.911560 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8"] Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.912446 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.953452 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f6b83f4a-6910-44b3-9fca-b9b455cc3d97-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8\" (UID: \"f6b83f4a-6910-44b3-9fca-b9b455cc3d97\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8" Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.953529 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qqdzq\" (UniqueName: \"kubernetes.io/projected/f6b83f4a-6910-44b3-9fca-b9b455cc3d97-kube-api-access-qqdzq\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8\" (UID: \"f6b83f4a-6910-44b3-9fca-b9b455cc3d97\") " 
pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8" Dec 05 01:45:47 crc kubenswrapper[4665]: I1205 01:45:47.953829 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f6b83f4a-6910-44b3-9fca-b9b455cc3d97-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8\" (UID: \"f6b83f4a-6910-44b3-9fca-b9b455cc3d97\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8" Dec 05 01:45:48 crc kubenswrapper[4665]: I1205 01:45:48.054471 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f6b83f4a-6910-44b3-9fca-b9b455cc3d97-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8\" (UID: \"f6b83f4a-6910-44b3-9fca-b9b455cc3d97\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8" Dec 05 01:45:48 crc kubenswrapper[4665]: I1205 01:45:48.054525 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f6b83f4a-6910-44b3-9fca-b9b455cc3d97-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8\" (UID: \"f6b83f4a-6910-44b3-9fca-b9b455cc3d97\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8" Dec 05 01:45:48 crc kubenswrapper[4665]: I1205 01:45:48.054555 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qqdzq\" (UniqueName: \"kubernetes.io/projected/f6b83f4a-6910-44b3-9fca-b9b455cc3d97-kube-api-access-qqdzq\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8\" (UID: \"f6b83f4a-6910-44b3-9fca-b9b455cc3d97\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8" Dec 05 01:45:48 crc kubenswrapper[4665]: I1205 01:45:48.058530 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f6b83f4a-6910-44b3-9fca-b9b455cc3d97-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8\" (UID: \"f6b83f4a-6910-44b3-9fca-b9b455cc3d97\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8" Dec 05 01:45:48 crc kubenswrapper[4665]: I1205 01:45:48.059607 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f6b83f4a-6910-44b3-9fca-b9b455cc3d97-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8\" (UID: \"f6b83f4a-6910-44b3-9fca-b9b455cc3d97\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8" Dec 05 01:45:48 crc kubenswrapper[4665]: I1205 01:45:48.081220 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qqdzq\" (UniqueName: \"kubernetes.io/projected/f6b83f4a-6910-44b3-9fca-b9b455cc3d97-kube-api-access-qqdzq\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8\" (UID: \"f6b83f4a-6910-44b3-9fca-b9b455cc3d97\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8" Dec 05 01:45:48 crc kubenswrapper[4665]: I1205 01:45:48.215650 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8" Dec 05 01:45:48 crc kubenswrapper[4665]: I1205 01:45:48.700139 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8"] Dec 05 01:45:48 crc kubenswrapper[4665]: W1205 01:45:48.717594 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf6b83f4a_6910_44b3_9fca_b9b455cc3d97.slice/crio-0eb449c5bd7a590076fd6eb4db06b819fdf113509d5850b8d95cd7fada1d3973 WatchSource:0}: Error finding container 0eb449c5bd7a590076fd6eb4db06b819fdf113509d5850b8d95cd7fada1d3973: Status 404 returned error can't find the container with id 0eb449c5bd7a590076fd6eb4db06b819fdf113509d5850b8d95cd7fada1d3973 Dec 05 01:45:48 crc kubenswrapper[4665]: I1205 01:45:48.815003 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8" event={"ID":"f6b83f4a-6910-44b3-9fca-b9b455cc3d97","Type":"ContainerStarted","Data":"0eb449c5bd7a590076fd6eb4db06b819fdf113509d5850b8d95cd7fada1d3973"} Dec 05 01:45:49 crc kubenswrapper[4665]: I1205 01:45:49.827953 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8" event={"ID":"f6b83f4a-6910-44b3-9fca-b9b455cc3d97","Type":"ContainerStarted","Data":"5a4007b3b87d7890624ab1a427c679ab44ece0fc3be9ddbaf59b6137d15daff8"} Dec 05 01:45:49 crc kubenswrapper[4665]: I1205 01:45:49.868006 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8" podStartSLOduration=2.682576055 podStartE2EDuration="2.867988541s" podCreationTimestamp="2025-12-05 01:45:47 +0000 UTC" firstStartedPulling="2025-12-05 01:45:48.721027177 +0000 UTC m=+2124.060419476" lastFinishedPulling="2025-12-05 01:45:48.906439673 +0000 UTC m=+2124.245831962" observedRunningTime="2025-12-05 01:45:49.845437781 +0000 UTC m=+2125.184830080" watchObservedRunningTime="2025-12-05 01:45:49.867988541 +0000 UTC m=+2125.207380840" Dec 05 01:45:54 crc kubenswrapper[4665]: I1205 01:45:54.047947 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-lljbq"] Dec 05 01:45:54 crc kubenswrapper[4665]: I1205 01:45:54.057816 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-lljbq"] Dec 05 01:45:54 crc kubenswrapper[4665]: I1205 01:45:54.903273 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="109c1163-805a-4399-b593-66f29ff2046c" path="/var/lib/kubelet/pods/109c1163-805a-4399-b593-66f29ff2046c/volumes" Dec 05 01:45:57 crc kubenswrapper[4665]: I1205 01:45:57.031770 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-r4k8p"] Dec 05 01:45:57 crc kubenswrapper[4665]: I1205 01:45:57.033902 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-r4k8p" Dec 05 01:45:57 crc kubenswrapper[4665]: I1205 01:45:57.044790 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r4k8p"] Dec 05 01:45:57 crc kubenswrapper[4665]: I1205 01:45:57.114898 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xbghh\" (UniqueName: \"kubernetes.io/projected/6440e879-37bb-4d71-83cc-57311d2c44a6-kube-api-access-xbghh\") pod \"redhat-operators-r4k8p\" (UID: \"6440e879-37bb-4d71-83cc-57311d2c44a6\") " pod="openshift-marketplace/redhat-operators-r4k8p" Dec 05 01:45:57 crc kubenswrapper[4665]: I1205 01:45:57.114989 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6440e879-37bb-4d71-83cc-57311d2c44a6-utilities\") pod \"redhat-operators-r4k8p\" (UID: \"6440e879-37bb-4d71-83cc-57311d2c44a6\") " pod="openshift-marketplace/redhat-operators-r4k8p" Dec 05 01:45:57 crc kubenswrapper[4665]: I1205 01:45:57.115040 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6440e879-37bb-4d71-83cc-57311d2c44a6-catalog-content\") pod \"redhat-operators-r4k8p\" (UID: \"6440e879-37bb-4d71-83cc-57311d2c44a6\") " pod="openshift-marketplace/redhat-operators-r4k8p" Dec 05 01:45:57 crc kubenswrapper[4665]: I1205 01:45:57.216386 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xbghh\" (UniqueName: \"kubernetes.io/projected/6440e879-37bb-4d71-83cc-57311d2c44a6-kube-api-access-xbghh\") pod \"redhat-operators-r4k8p\" (UID: \"6440e879-37bb-4d71-83cc-57311d2c44a6\") " pod="openshift-marketplace/redhat-operators-r4k8p" Dec 05 01:45:57 crc kubenswrapper[4665]: I1205 01:45:57.216473 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6440e879-37bb-4d71-83cc-57311d2c44a6-utilities\") pod \"redhat-operators-r4k8p\" (UID: \"6440e879-37bb-4d71-83cc-57311d2c44a6\") " pod="openshift-marketplace/redhat-operators-r4k8p" Dec 05 01:45:57 crc kubenswrapper[4665]: I1205 01:45:57.216501 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6440e879-37bb-4d71-83cc-57311d2c44a6-catalog-content\") pod \"redhat-operators-r4k8p\" (UID: \"6440e879-37bb-4d71-83cc-57311d2c44a6\") " pod="openshift-marketplace/redhat-operators-r4k8p" Dec 05 01:45:57 crc kubenswrapper[4665]: I1205 01:45:57.217247 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6440e879-37bb-4d71-83cc-57311d2c44a6-catalog-content\") pod \"redhat-operators-r4k8p\" (UID: \"6440e879-37bb-4d71-83cc-57311d2c44a6\") " pod="openshift-marketplace/redhat-operators-r4k8p" Dec 05 01:45:57 crc kubenswrapper[4665]: I1205 01:45:57.218575 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6440e879-37bb-4d71-83cc-57311d2c44a6-utilities\") pod \"redhat-operators-r4k8p\" (UID: \"6440e879-37bb-4d71-83cc-57311d2c44a6\") " pod="openshift-marketplace/redhat-operators-r4k8p" Dec 05 01:45:57 crc kubenswrapper[4665]: I1205 01:45:57.237046 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-xbghh\" (UniqueName: \"kubernetes.io/projected/6440e879-37bb-4d71-83cc-57311d2c44a6-kube-api-access-xbghh\") pod \"redhat-operators-r4k8p\" (UID: \"6440e879-37bb-4d71-83cc-57311d2c44a6\") " pod="openshift-marketplace/redhat-operators-r4k8p" Dec 05 01:45:57 crc kubenswrapper[4665]: I1205 01:45:57.357759 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r4k8p" Dec 05 01:45:57 crc kubenswrapper[4665]: I1205 01:45:57.828652 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-r4k8p"] Dec 05 01:45:57 crc kubenswrapper[4665]: I1205 01:45:57.899675 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r4k8p" event={"ID":"6440e879-37bb-4d71-83cc-57311d2c44a6","Type":"ContainerStarted","Data":"0b81050a4f7af8fa300cb0f158b3cd5a8299d21ce4c6f44ed986211decb7a772"} Dec 05 01:45:58 crc kubenswrapper[4665]: I1205 01:45:58.908057 4665 generic.go:334] "Generic (PLEG): container finished" podID="6440e879-37bb-4d71-83cc-57311d2c44a6" containerID="7bdb2aba673413cfeafcb301328da8c6bbf4c6904cf5a1a873ef1bf8e3e6281a" exitCode=0 Dec 05 01:45:58 crc kubenswrapper[4665]: I1205 01:45:58.908429 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r4k8p" event={"ID":"6440e879-37bb-4d71-83cc-57311d2c44a6","Type":"ContainerDied","Data":"7bdb2aba673413cfeafcb301328da8c6bbf4c6904cf5a1a873ef1bf8e3e6281a"} Dec 05 01:46:00 crc kubenswrapper[4665]: I1205 01:46:00.928002 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r4k8p" event={"ID":"6440e879-37bb-4d71-83cc-57311d2c44a6","Type":"ContainerStarted","Data":"4da23bf4fef0dbe05968a756cb5fc1abadf24a9a55cec2e2511eec2d7cca46b0"} Dec 05 01:46:03 crc kubenswrapper[4665]: I1205 01:46:03.974326 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r4k8p" event={"ID":"6440e879-37bb-4d71-83cc-57311d2c44a6","Type":"ContainerDied","Data":"4da23bf4fef0dbe05968a756cb5fc1abadf24a9a55cec2e2511eec2d7cca46b0"} Dec 05 01:46:03 crc kubenswrapper[4665]: I1205 01:46:03.974355 4665 generic.go:334] "Generic (PLEG): container finished" podID="6440e879-37bb-4d71-83cc-57311d2c44a6" containerID="4da23bf4fef0dbe05968a756cb5fc1abadf24a9a55cec2e2511eec2d7cca46b0" exitCode=0 Dec 05 01:46:03 crc kubenswrapper[4665]: I1205 01:46:03.977016 4665 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 01:46:04 crc kubenswrapper[4665]: I1205 01:46:04.985252 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r4k8p" event={"ID":"6440e879-37bb-4d71-83cc-57311d2c44a6","Type":"ContainerStarted","Data":"837b2bcfa68a9772420f3e220d1eb1f4347ed0a3ba709233daee4a80a146010f"} Dec 05 01:46:07 crc kubenswrapper[4665]: I1205 01:46:07.358015 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-r4k8p" Dec 05 01:46:07 crc kubenswrapper[4665]: I1205 01:46:07.358371 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-r4k8p" Dec 05 01:46:08 crc kubenswrapper[4665]: I1205 01:46:08.399414 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-r4k8p" podUID="6440e879-37bb-4d71-83cc-57311d2c44a6" containerName="registry-server" probeResult="failure" output=< Dec 05 
01:46:08 crc kubenswrapper[4665]: timeout: failed to connect service ":50051" within 1s Dec 05 01:46:08 crc kubenswrapper[4665]: > Dec 05 01:46:14 crc kubenswrapper[4665]: I1205 01:46:14.923519 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:46:14 crc kubenswrapper[4665]: I1205 01:46:14.923895 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:46:17 crc kubenswrapper[4665]: I1205 01:46:17.402568 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-r4k8p" Dec 05 01:46:17 crc kubenswrapper[4665]: I1205 01:46:17.441784 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-r4k8p" podStartSLOduration=14.993375329 podStartE2EDuration="20.44176628s" podCreationTimestamp="2025-12-05 01:45:57 +0000 UTC" firstStartedPulling="2025-12-05 01:45:58.910522137 +0000 UTC m=+2134.249914436" lastFinishedPulling="2025-12-05 01:46:04.358913088 +0000 UTC m=+2139.698305387" observedRunningTime="2025-12-05 01:46:05.006663109 +0000 UTC m=+2140.346055408" watchObservedRunningTime="2025-12-05 01:46:17.44176628 +0000 UTC m=+2152.781158579" Dec 05 01:46:17 crc kubenswrapper[4665]: I1205 01:46:17.479823 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-r4k8p" Dec 05 01:46:17 crc kubenswrapper[4665]: I1205 01:46:17.653856 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-r4k8p"] Dec 05 01:46:19 crc kubenswrapper[4665]: I1205 01:46:19.108983 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-r4k8p" podUID="6440e879-37bb-4d71-83cc-57311d2c44a6" containerName="registry-server" containerID="cri-o://837b2bcfa68a9772420f3e220d1eb1f4347ed0a3ba709233daee4a80a146010f" gracePeriod=2 Dec 05 01:46:19 crc kubenswrapper[4665]: I1205 01:46:19.667384 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-r4k8p" Dec 05 01:46:19 crc kubenswrapper[4665]: I1205 01:46:19.734208 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xbghh\" (UniqueName: \"kubernetes.io/projected/6440e879-37bb-4d71-83cc-57311d2c44a6-kube-api-access-xbghh\") pod \"6440e879-37bb-4d71-83cc-57311d2c44a6\" (UID: \"6440e879-37bb-4d71-83cc-57311d2c44a6\") " Dec 05 01:46:19 crc kubenswrapper[4665]: I1205 01:46:19.734283 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6440e879-37bb-4d71-83cc-57311d2c44a6-utilities\") pod \"6440e879-37bb-4d71-83cc-57311d2c44a6\" (UID: \"6440e879-37bb-4d71-83cc-57311d2c44a6\") " Dec 05 01:46:19 crc kubenswrapper[4665]: I1205 01:46:19.734339 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6440e879-37bb-4d71-83cc-57311d2c44a6-catalog-content\") pod \"6440e879-37bb-4d71-83cc-57311d2c44a6\" (UID: \"6440e879-37bb-4d71-83cc-57311d2c44a6\") " Dec 05 01:46:19 crc kubenswrapper[4665]: I1205 01:46:19.735408 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6440e879-37bb-4d71-83cc-57311d2c44a6-utilities" (OuterVolumeSpecName: "utilities") pod "6440e879-37bb-4d71-83cc-57311d2c44a6" (UID: "6440e879-37bb-4d71-83cc-57311d2c44a6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:46:19 crc kubenswrapper[4665]: I1205 01:46:19.740397 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6440e879-37bb-4d71-83cc-57311d2c44a6-kube-api-access-xbghh" (OuterVolumeSpecName: "kube-api-access-xbghh") pod "6440e879-37bb-4d71-83cc-57311d2c44a6" (UID: "6440e879-37bb-4d71-83cc-57311d2c44a6"). InnerVolumeSpecName "kube-api-access-xbghh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:46:19 crc kubenswrapper[4665]: I1205 01:46:19.837415 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xbghh\" (UniqueName: \"kubernetes.io/projected/6440e879-37bb-4d71-83cc-57311d2c44a6-kube-api-access-xbghh\") on node \"crc\" DevicePath \"\"" Dec 05 01:46:19 crc kubenswrapper[4665]: I1205 01:46:19.837451 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6440e879-37bb-4d71-83cc-57311d2c44a6-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 01:46:19 crc kubenswrapper[4665]: I1205 01:46:19.848494 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6440e879-37bb-4d71-83cc-57311d2c44a6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6440e879-37bb-4d71-83cc-57311d2c44a6" (UID: "6440e879-37bb-4d71-83cc-57311d2c44a6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:46:19 crc kubenswrapper[4665]: I1205 01:46:19.940880 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6440e879-37bb-4d71-83cc-57311d2c44a6-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 01:46:20 crc kubenswrapper[4665]: I1205 01:46:20.117341 4665 generic.go:334] "Generic (PLEG): container finished" podID="6440e879-37bb-4d71-83cc-57311d2c44a6" containerID="837b2bcfa68a9772420f3e220d1eb1f4347ed0a3ba709233daee4a80a146010f" exitCode=0 Dec 05 01:46:20 crc kubenswrapper[4665]: I1205 01:46:20.117701 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r4k8p" event={"ID":"6440e879-37bb-4d71-83cc-57311d2c44a6","Type":"ContainerDied","Data":"837b2bcfa68a9772420f3e220d1eb1f4347ed0a3ba709233daee4a80a146010f"} Dec 05 01:46:20 crc kubenswrapper[4665]: I1205 01:46:20.117729 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-r4k8p" event={"ID":"6440e879-37bb-4d71-83cc-57311d2c44a6","Type":"ContainerDied","Data":"0b81050a4f7af8fa300cb0f158b3cd5a8299d21ce4c6f44ed986211decb7a772"} Dec 05 01:46:20 crc kubenswrapper[4665]: I1205 01:46:20.117748 4665 scope.go:117] "RemoveContainer" containerID="837b2bcfa68a9772420f3e220d1eb1f4347ed0a3ba709233daee4a80a146010f" Dec 05 01:46:20 crc kubenswrapper[4665]: I1205 01:46:20.117880 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-r4k8p" Dec 05 01:46:20 crc kubenswrapper[4665]: I1205 01:46:20.147813 4665 scope.go:117] "RemoveContainer" containerID="4da23bf4fef0dbe05968a756cb5fc1abadf24a9a55cec2e2511eec2d7cca46b0" Dec 05 01:46:20 crc kubenswrapper[4665]: I1205 01:46:20.152605 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-r4k8p"] Dec 05 01:46:20 crc kubenswrapper[4665]: I1205 01:46:20.168129 4665 scope.go:117] "RemoveContainer" containerID="7bdb2aba673413cfeafcb301328da8c6bbf4c6904cf5a1a873ef1bf8e3e6281a" Dec 05 01:46:20 crc kubenswrapper[4665]: I1205 01:46:20.181597 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-r4k8p"] Dec 05 01:46:20 crc kubenswrapper[4665]: I1205 01:46:20.228203 4665 scope.go:117] "RemoveContainer" containerID="837b2bcfa68a9772420f3e220d1eb1f4347ed0a3ba709233daee4a80a146010f" Dec 05 01:46:20 crc kubenswrapper[4665]: E1205 01:46:20.230390 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"837b2bcfa68a9772420f3e220d1eb1f4347ed0a3ba709233daee4a80a146010f\": container with ID starting with 837b2bcfa68a9772420f3e220d1eb1f4347ed0a3ba709233daee4a80a146010f not found: ID does not exist" containerID="837b2bcfa68a9772420f3e220d1eb1f4347ed0a3ba709233daee4a80a146010f" Dec 05 01:46:20 crc kubenswrapper[4665]: I1205 01:46:20.230441 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"837b2bcfa68a9772420f3e220d1eb1f4347ed0a3ba709233daee4a80a146010f"} err="failed to get container status \"837b2bcfa68a9772420f3e220d1eb1f4347ed0a3ba709233daee4a80a146010f\": rpc error: code = NotFound desc = could not find container \"837b2bcfa68a9772420f3e220d1eb1f4347ed0a3ba709233daee4a80a146010f\": container with ID starting with 837b2bcfa68a9772420f3e220d1eb1f4347ed0a3ba709233daee4a80a146010f not found: ID does not exist" Dec 05 01:46:20 crc 
kubenswrapper[4665]: I1205 01:46:20.230468 4665 scope.go:117] "RemoveContainer" containerID="4da23bf4fef0dbe05968a756cb5fc1abadf24a9a55cec2e2511eec2d7cca46b0" Dec 05 01:46:20 crc kubenswrapper[4665]: E1205 01:46:20.232589 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4da23bf4fef0dbe05968a756cb5fc1abadf24a9a55cec2e2511eec2d7cca46b0\": container with ID starting with 4da23bf4fef0dbe05968a756cb5fc1abadf24a9a55cec2e2511eec2d7cca46b0 not found: ID does not exist" containerID="4da23bf4fef0dbe05968a756cb5fc1abadf24a9a55cec2e2511eec2d7cca46b0" Dec 05 01:46:20 crc kubenswrapper[4665]: I1205 01:46:20.232716 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4da23bf4fef0dbe05968a756cb5fc1abadf24a9a55cec2e2511eec2d7cca46b0"} err="failed to get container status \"4da23bf4fef0dbe05968a756cb5fc1abadf24a9a55cec2e2511eec2d7cca46b0\": rpc error: code = NotFound desc = could not find container \"4da23bf4fef0dbe05968a756cb5fc1abadf24a9a55cec2e2511eec2d7cca46b0\": container with ID starting with 4da23bf4fef0dbe05968a756cb5fc1abadf24a9a55cec2e2511eec2d7cca46b0 not found: ID does not exist" Dec 05 01:46:20 crc kubenswrapper[4665]: I1205 01:46:20.232826 4665 scope.go:117] "RemoveContainer" containerID="7bdb2aba673413cfeafcb301328da8c6bbf4c6904cf5a1a873ef1bf8e3e6281a" Dec 05 01:46:20 crc kubenswrapper[4665]: E1205 01:46:20.233485 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7bdb2aba673413cfeafcb301328da8c6bbf4c6904cf5a1a873ef1bf8e3e6281a\": container with ID starting with 7bdb2aba673413cfeafcb301328da8c6bbf4c6904cf5a1a873ef1bf8e3e6281a not found: ID does not exist" containerID="7bdb2aba673413cfeafcb301328da8c6bbf4c6904cf5a1a873ef1bf8e3e6281a" Dec 05 01:46:20 crc kubenswrapper[4665]: I1205 01:46:20.233519 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bdb2aba673413cfeafcb301328da8c6bbf4c6904cf5a1a873ef1bf8e3e6281a"} err="failed to get container status \"7bdb2aba673413cfeafcb301328da8c6bbf4c6904cf5a1a873ef1bf8e3e6281a\": rpc error: code = NotFound desc = could not find container \"7bdb2aba673413cfeafcb301328da8c6bbf4c6904cf5a1a873ef1bf8e3e6281a\": container with ID starting with 7bdb2aba673413cfeafcb301328da8c6bbf4c6904cf5a1a873ef1bf8e3e6281a not found: ID does not exist" Dec 05 01:46:20 crc kubenswrapper[4665]: I1205 01:46:20.924313 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6440e879-37bb-4d71-83cc-57311d2c44a6" path="/var/lib/kubelet/pods/6440e879-37bb-4d71-83cc-57311d2c44a6/volumes" Dec 05 01:46:33 crc kubenswrapper[4665]: I1205 01:46:33.789101 4665 scope.go:117] "RemoveContainer" containerID="7f3850628abfa8cfcd2eb864137177dc10758be1f6ce19b53bbc65e712ca5ef5" Dec 05 01:46:44 crc kubenswrapper[4665]: I1205 01:46:44.929323 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:46:44 crc kubenswrapper[4665]: I1205 01:46:44.930486 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get 
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:46:44 crc kubenswrapper[4665]: I1205 01:46:44.930564 4665 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 01:46:44 crc kubenswrapper[4665]: I1205 01:46:44.931771 4665 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"5c570a9be3cb99220cfff24bbdba7c2a172ecd2885266f83ae0d281c3e6b2ef1"} pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 01:46:44 crc kubenswrapper[4665]: I1205 01:46:44.931856 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" containerID="cri-o://5c570a9be3cb99220cfff24bbdba7c2a172ecd2885266f83ae0d281c3e6b2ef1" gracePeriod=600 Dec 05 01:46:45 crc kubenswrapper[4665]: I1205 01:46:45.783768 4665 generic.go:334] "Generic (PLEG): container finished" podID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerID="5c570a9be3cb99220cfff24bbdba7c2a172ecd2885266f83ae0d281c3e6b2ef1" exitCode=0 Dec 05 01:46:45 crc kubenswrapper[4665]: I1205 01:46:45.783850 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerDied","Data":"5c570a9be3cb99220cfff24bbdba7c2a172ecd2885266f83ae0d281c3e6b2ef1"} Dec 05 01:46:45 crc kubenswrapper[4665]: I1205 01:46:45.784100 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerStarted","Data":"2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca"} Dec 05 01:46:45 crc kubenswrapper[4665]: I1205 01:46:45.784128 4665 scope.go:117] "RemoveContainer" containerID="1ad6e695faae7c088cdcbead4fd88d1e2015bf6023ca21fb6234c9ab02280c25" Dec 05 01:46:47 crc kubenswrapper[4665]: I1205 01:46:47.803122 4665 generic.go:334] "Generic (PLEG): container finished" podID="f6b83f4a-6910-44b3-9fca-b9b455cc3d97" containerID="5a4007b3b87d7890624ab1a427c679ab44ece0fc3be9ddbaf59b6137d15daff8" exitCode=0 Dec 05 01:46:47 crc kubenswrapper[4665]: I1205 01:46:47.803534 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8" event={"ID":"f6b83f4a-6910-44b3-9fca-b9b455cc3d97","Type":"ContainerDied","Data":"5a4007b3b87d7890624ab1a427c679ab44ece0fc3be9ddbaf59b6137d15daff8"} Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.181651 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8" Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.193205 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f6b83f4a-6910-44b3-9fca-b9b455cc3d97-inventory\") pod \"f6b83f4a-6910-44b3-9fca-b9b455cc3d97\" (UID: \"f6b83f4a-6910-44b3-9fca-b9b455cc3d97\") " Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.193363 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qqdzq\" (UniqueName: \"kubernetes.io/projected/f6b83f4a-6910-44b3-9fca-b9b455cc3d97-kube-api-access-qqdzq\") pod \"f6b83f4a-6910-44b3-9fca-b9b455cc3d97\" (UID: \"f6b83f4a-6910-44b3-9fca-b9b455cc3d97\") " Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.193526 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f6b83f4a-6910-44b3-9fca-b9b455cc3d97-ssh-key\") pod \"f6b83f4a-6910-44b3-9fca-b9b455cc3d97\" (UID: \"f6b83f4a-6910-44b3-9fca-b9b455cc3d97\") " Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.227128 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f6b83f4a-6910-44b3-9fca-b9b455cc3d97-kube-api-access-qqdzq" (OuterVolumeSpecName: "kube-api-access-qqdzq") pod "f6b83f4a-6910-44b3-9fca-b9b455cc3d97" (UID: "f6b83f4a-6910-44b3-9fca-b9b455cc3d97"). InnerVolumeSpecName "kube-api-access-qqdzq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.245453 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6b83f4a-6910-44b3-9fca-b9b455cc3d97-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f6b83f4a-6910-44b3-9fca-b9b455cc3d97" (UID: "f6b83f4a-6910-44b3-9fca-b9b455cc3d97"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.263903 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f6b83f4a-6910-44b3-9fca-b9b455cc3d97-inventory" (OuterVolumeSpecName: "inventory") pod "f6b83f4a-6910-44b3-9fca-b9b455cc3d97" (UID: "f6b83f4a-6910-44b3-9fca-b9b455cc3d97"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.313558 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qqdzq\" (UniqueName: \"kubernetes.io/projected/f6b83f4a-6910-44b3-9fca-b9b455cc3d97-kube-api-access-qqdzq\") on node \"crc\" DevicePath \"\"" Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.313600 4665 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f6b83f4a-6910-44b3-9fca-b9b455cc3d97-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.313609 4665 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f6b83f4a-6910-44b3-9fca-b9b455cc3d97-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.820115 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8" event={"ID":"f6b83f4a-6910-44b3-9fca-b9b455cc3d97","Type":"ContainerDied","Data":"0eb449c5bd7a590076fd6eb4db06b819fdf113509d5850b8d95cd7fada1d3973"} Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.820467 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0eb449c5bd7a590076fd6eb4db06b819fdf113509d5850b8d95cd7fada1d3973" Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.820181 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8" Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.953532 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-nx4nq"] Dec 05 01:46:49 crc kubenswrapper[4665]: E1205 01:46:49.954010 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f6b83f4a-6910-44b3-9fca-b9b455cc3d97" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.954038 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="f6b83f4a-6910-44b3-9fca-b9b455cc3d97" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 01:46:49 crc kubenswrapper[4665]: E1205 01:46:49.954066 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6440e879-37bb-4d71-83cc-57311d2c44a6" containerName="extract-content" Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.954076 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="6440e879-37bb-4d71-83cc-57311d2c44a6" containerName="extract-content" Dec 05 01:46:49 crc kubenswrapper[4665]: E1205 01:46:49.954093 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6440e879-37bb-4d71-83cc-57311d2c44a6" containerName="extract-utilities" Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.954101 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="6440e879-37bb-4d71-83cc-57311d2c44a6" containerName="extract-utilities" Dec 05 01:46:49 crc kubenswrapper[4665]: E1205 01:46:49.954119 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6440e879-37bb-4d71-83cc-57311d2c44a6" containerName="registry-server" Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.954128 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="6440e879-37bb-4d71-83cc-57311d2c44a6" containerName="registry-server" Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.954370 4665 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="f6b83f4a-6910-44b3-9fca-b9b455cc3d97" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.954411 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="6440e879-37bb-4d71-83cc-57311d2c44a6" containerName="registry-server" Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.955400 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-nx4nq" Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.967095 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.967171 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.967397 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-nx4nq"] Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.967526 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 01:46:49 crc kubenswrapper[4665]: I1205 01:46:49.968660 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r7jw8" Dec 05 01:46:50 crc kubenswrapper[4665]: I1205 01:46:50.024369 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0e47d679-dd01-454e-8ae4-887d6d676d15-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-nx4nq\" (UID: \"0e47d679-dd01-454e-8ae4-887d6d676d15\") " pod="openstack/ssh-known-hosts-edpm-deployment-nx4nq" Dec 05 01:46:50 crc kubenswrapper[4665]: I1205 01:46:50.024524 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q56tp\" (UniqueName: \"kubernetes.io/projected/0e47d679-dd01-454e-8ae4-887d6d676d15-kube-api-access-q56tp\") pod \"ssh-known-hosts-edpm-deployment-nx4nq\" (UID: \"0e47d679-dd01-454e-8ae4-887d6d676d15\") " pod="openstack/ssh-known-hosts-edpm-deployment-nx4nq" Dec 05 01:46:50 crc kubenswrapper[4665]: I1205 01:46:50.024674 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0e47d679-dd01-454e-8ae4-887d6d676d15-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-nx4nq\" (UID: \"0e47d679-dd01-454e-8ae4-887d6d676d15\") " pod="openstack/ssh-known-hosts-edpm-deployment-nx4nq" Dec 05 01:46:50 crc kubenswrapper[4665]: I1205 01:46:50.125746 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q56tp\" (UniqueName: \"kubernetes.io/projected/0e47d679-dd01-454e-8ae4-887d6d676d15-kube-api-access-q56tp\") pod \"ssh-known-hosts-edpm-deployment-nx4nq\" (UID: \"0e47d679-dd01-454e-8ae4-887d6d676d15\") " pod="openstack/ssh-known-hosts-edpm-deployment-nx4nq" Dec 05 01:46:50 crc kubenswrapper[4665]: I1205 01:46:50.125806 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0e47d679-dd01-454e-8ae4-887d6d676d15-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-nx4nq\" (UID: \"0e47d679-dd01-454e-8ae4-887d6d676d15\") " 
pod="openstack/ssh-known-hosts-edpm-deployment-nx4nq" Dec 05 01:46:50 crc kubenswrapper[4665]: I1205 01:46:50.125935 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0e47d679-dd01-454e-8ae4-887d6d676d15-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-nx4nq\" (UID: \"0e47d679-dd01-454e-8ae4-887d6d676d15\") " pod="openstack/ssh-known-hosts-edpm-deployment-nx4nq" Dec 05 01:46:50 crc kubenswrapper[4665]: I1205 01:46:50.130549 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0e47d679-dd01-454e-8ae4-887d6d676d15-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-nx4nq\" (UID: \"0e47d679-dd01-454e-8ae4-887d6d676d15\") " pod="openstack/ssh-known-hosts-edpm-deployment-nx4nq" Dec 05 01:46:50 crc kubenswrapper[4665]: I1205 01:46:50.130701 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0e47d679-dd01-454e-8ae4-887d6d676d15-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-nx4nq\" (UID: \"0e47d679-dd01-454e-8ae4-887d6d676d15\") " pod="openstack/ssh-known-hosts-edpm-deployment-nx4nq" Dec 05 01:46:50 crc kubenswrapper[4665]: I1205 01:46:50.147298 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q56tp\" (UniqueName: \"kubernetes.io/projected/0e47d679-dd01-454e-8ae4-887d6d676d15-kube-api-access-q56tp\") pod \"ssh-known-hosts-edpm-deployment-nx4nq\" (UID: \"0e47d679-dd01-454e-8ae4-887d6d676d15\") " pod="openstack/ssh-known-hosts-edpm-deployment-nx4nq" Dec 05 01:46:50 crc kubenswrapper[4665]: I1205 01:46:50.271665 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-nx4nq" Dec 05 01:46:50 crc kubenswrapper[4665]: I1205 01:46:50.800460 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-nx4nq"] Dec 05 01:46:50 crc kubenswrapper[4665]: I1205 01:46:50.833434 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-nx4nq" event={"ID":"0e47d679-dd01-454e-8ae4-887d6d676d15","Type":"ContainerStarted","Data":"42880f63eb3f7f2526583b9046e44480c50f5f5ad15d84d94fbbfbd524e66c0e"} Dec 05 01:46:51 crc kubenswrapper[4665]: I1205 01:46:51.842060 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-nx4nq" event={"ID":"0e47d679-dd01-454e-8ae4-887d6d676d15","Type":"ContainerStarted","Data":"a35d940f7dcf42fb83f17dc856eca6bf6b46c5e237ee0432e26c909364c43601"} Dec 05 01:46:51 crc kubenswrapper[4665]: I1205 01:46:51.861955 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-nx4nq" podStartSLOduration=2.6834290149999998 podStartE2EDuration="2.861936492s" podCreationTimestamp="2025-12-05 01:46:49 +0000 UTC" firstStartedPulling="2025-12-05 01:46:50.805106428 +0000 UTC m=+2186.144498787" lastFinishedPulling="2025-12-05 01:46:50.983613945 +0000 UTC m=+2186.323006264" observedRunningTime="2025-12-05 01:46:51.859994855 +0000 UTC m=+2187.199387154" watchObservedRunningTime="2025-12-05 01:46:51.861936492 +0000 UTC m=+2187.201328791" Dec 05 01:46:58 crc kubenswrapper[4665]: I1205 01:46:58.902470 4665 generic.go:334] "Generic (PLEG): container finished" podID="0e47d679-dd01-454e-8ae4-887d6d676d15" containerID="a35d940f7dcf42fb83f17dc856eca6bf6b46c5e237ee0432e26c909364c43601" exitCode=0 Dec 05 01:46:58 crc kubenswrapper[4665]: I1205 01:46:58.906427 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-nx4nq" event={"ID":"0e47d679-dd01-454e-8ae4-887d6d676d15","Type":"ContainerDied","Data":"a35d940f7dcf42fb83f17dc856eca6bf6b46c5e237ee0432e26c909364c43601"} Dec 05 01:47:00 crc kubenswrapper[4665]: I1205 01:47:00.352689 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-nx4nq" Dec 05 01:47:00 crc kubenswrapper[4665]: I1205 01:47:00.436135 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q56tp\" (UniqueName: \"kubernetes.io/projected/0e47d679-dd01-454e-8ae4-887d6d676d15-kube-api-access-q56tp\") pod \"0e47d679-dd01-454e-8ae4-887d6d676d15\" (UID: \"0e47d679-dd01-454e-8ae4-887d6d676d15\") " Dec 05 01:47:00 crc kubenswrapper[4665]: I1205 01:47:00.436286 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0e47d679-dd01-454e-8ae4-887d6d676d15-ssh-key-openstack-edpm-ipam\") pod \"0e47d679-dd01-454e-8ae4-887d6d676d15\" (UID: \"0e47d679-dd01-454e-8ae4-887d6d676d15\") " Dec 05 01:47:00 crc kubenswrapper[4665]: I1205 01:47:00.436389 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0e47d679-dd01-454e-8ae4-887d6d676d15-inventory-0\") pod \"0e47d679-dd01-454e-8ae4-887d6d676d15\" (UID: \"0e47d679-dd01-454e-8ae4-887d6d676d15\") " Dec 05 01:47:00 crc kubenswrapper[4665]: I1205 01:47:00.452745 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e47d679-dd01-454e-8ae4-887d6d676d15-kube-api-access-q56tp" (OuterVolumeSpecName: "kube-api-access-q56tp") pod "0e47d679-dd01-454e-8ae4-887d6d676d15" (UID: "0e47d679-dd01-454e-8ae4-887d6d676d15"). InnerVolumeSpecName "kube-api-access-q56tp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:47:00 crc kubenswrapper[4665]: I1205 01:47:00.465704 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e47d679-dd01-454e-8ae4-887d6d676d15-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "0e47d679-dd01-454e-8ae4-887d6d676d15" (UID: "0e47d679-dd01-454e-8ae4-887d6d676d15"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:47:00 crc kubenswrapper[4665]: I1205 01:47:00.473235 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e47d679-dd01-454e-8ae4-887d6d676d15-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "0e47d679-dd01-454e-8ae4-887d6d676d15" (UID: "0e47d679-dd01-454e-8ae4-887d6d676d15"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:47:00 crc kubenswrapper[4665]: I1205 01:47:00.538422 4665 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/0e47d679-dd01-454e-8ae4-887d6d676d15-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\"" Dec 05 01:47:00 crc kubenswrapper[4665]: I1205 01:47:00.538478 4665 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/0e47d679-dd01-454e-8ae4-887d6d676d15-inventory-0\") on node \"crc\" DevicePath \"\"" Dec 05 01:47:00 crc kubenswrapper[4665]: I1205 01:47:00.538493 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q56tp\" (UniqueName: \"kubernetes.io/projected/0e47d679-dd01-454e-8ae4-887d6d676d15-kube-api-access-q56tp\") on node \"crc\" DevicePath \"\"" Dec 05 01:47:00 crc kubenswrapper[4665]: I1205 01:47:00.923454 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-nx4nq" event={"ID":"0e47d679-dd01-454e-8ae4-887d6d676d15","Type":"ContainerDied","Data":"42880f63eb3f7f2526583b9046e44480c50f5f5ad15d84d94fbbfbd524e66c0e"} Dec 05 01:47:00 crc kubenswrapper[4665]: I1205 01:47:00.923966 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="42880f63eb3f7f2526583b9046e44480c50f5f5ad15d84d94fbbfbd524e66c0e" Dec 05 01:47:00 crc kubenswrapper[4665]: I1205 01:47:00.923595 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-nx4nq" Dec 05 01:47:00 crc kubenswrapper[4665]: I1205 01:47:00.995274 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-8fk25"] Dec 05 01:47:00 crc kubenswrapper[4665]: E1205 01:47:00.995639 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e47d679-dd01-454e-8ae4-887d6d676d15" containerName="ssh-known-hosts-edpm-deployment" Dec 05 01:47:00 crc kubenswrapper[4665]: I1205 01:47:00.995655 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e47d679-dd01-454e-8ae4-887d6d676d15" containerName="ssh-known-hosts-edpm-deployment" Dec 05 01:47:00 crc kubenswrapper[4665]: I1205 01:47:00.995839 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e47d679-dd01-454e-8ae4-887d6d676d15" containerName="ssh-known-hosts-edpm-deployment" Dec 05 01:47:00 crc kubenswrapper[4665]: I1205 01:47:00.999430 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8fk25" Dec 05 01:47:01 crc kubenswrapper[4665]: I1205 01:47:01.003691 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 01:47:01 crc kubenswrapper[4665]: I1205 01:47:01.003902 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 01:47:01 crc kubenswrapper[4665]: I1205 01:47:01.006376 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 01:47:01 crc kubenswrapper[4665]: I1205 01:47:01.006597 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r7jw8" Dec 05 01:47:01 crc kubenswrapper[4665]: I1205 01:47:01.016307 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-8fk25"] Dec 05 01:47:01 crc kubenswrapper[4665]: I1205 01:47:01.051375 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/66505ea7-937f-4f07-b036-afca1adc368c-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8fk25\" (UID: \"66505ea7-937f-4f07-b036-afca1adc368c\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8fk25" Dec 05 01:47:01 crc kubenswrapper[4665]: I1205 01:47:01.051445 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/66505ea7-937f-4f07-b036-afca1adc368c-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8fk25\" (UID: \"66505ea7-937f-4f07-b036-afca1adc368c\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8fk25" Dec 05 01:47:01 crc kubenswrapper[4665]: I1205 01:47:01.051521 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgqgv\" (UniqueName: \"kubernetes.io/projected/66505ea7-937f-4f07-b036-afca1adc368c-kube-api-access-xgqgv\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8fk25\" (UID: \"66505ea7-937f-4f07-b036-afca1adc368c\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8fk25" Dec 05 01:47:01 crc kubenswrapper[4665]: I1205 01:47:01.153470 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/66505ea7-937f-4f07-b036-afca1adc368c-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8fk25\" (UID: \"66505ea7-937f-4f07-b036-afca1adc368c\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8fk25" Dec 05 01:47:01 crc kubenswrapper[4665]: I1205 01:47:01.153537 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/66505ea7-937f-4f07-b036-afca1adc368c-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8fk25\" (UID: \"66505ea7-937f-4f07-b036-afca1adc368c\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8fk25" Dec 05 01:47:01 crc kubenswrapper[4665]: I1205 01:47:01.153608 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgqgv\" (UniqueName: \"kubernetes.io/projected/66505ea7-937f-4f07-b036-afca1adc368c-kube-api-access-xgqgv\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8fk25\" (UID: \"66505ea7-937f-4f07-b036-afca1adc368c\") " 
pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8fk25" Dec 05 01:47:01 crc kubenswrapper[4665]: I1205 01:47:01.157759 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/66505ea7-937f-4f07-b036-afca1adc368c-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8fk25\" (UID: \"66505ea7-937f-4f07-b036-afca1adc368c\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8fk25" Dec 05 01:47:01 crc kubenswrapper[4665]: I1205 01:47:01.158579 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/66505ea7-937f-4f07-b036-afca1adc368c-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8fk25\" (UID: \"66505ea7-937f-4f07-b036-afca1adc368c\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8fk25" Dec 05 01:47:01 crc kubenswrapper[4665]: I1205 01:47:01.173357 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgqgv\" (UniqueName: \"kubernetes.io/projected/66505ea7-937f-4f07-b036-afca1adc368c-kube-api-access-xgqgv\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-8fk25\" (UID: \"66505ea7-937f-4f07-b036-afca1adc368c\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8fk25" Dec 05 01:47:01 crc kubenswrapper[4665]: I1205 01:47:01.324271 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8fk25" Dec 05 01:47:01 crc kubenswrapper[4665]: I1205 01:47:01.850437 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-8fk25"] Dec 05 01:47:01 crc kubenswrapper[4665]: I1205 01:47:01.933932 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8fk25" event={"ID":"66505ea7-937f-4f07-b036-afca1adc368c","Type":"ContainerStarted","Data":"7ea983093eb5a6c3a39a48669d67eecfa711e6398315b06276e7a21322fea985"} Dec 05 01:47:02 crc kubenswrapper[4665]: I1205 01:47:02.942571 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8fk25" event={"ID":"66505ea7-937f-4f07-b036-afca1adc368c","Type":"ContainerStarted","Data":"cf45c05d82229f07e48ca8c662b4438adcfe633c42d379791ccc850fea218d83"} Dec 05 01:47:02 crc kubenswrapper[4665]: I1205 01:47:02.962101 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8fk25" podStartSLOduration=2.7347265099999998 podStartE2EDuration="2.96207736s" podCreationTimestamp="2025-12-05 01:47:00 +0000 UTC" firstStartedPulling="2025-12-05 01:47:01.867208827 +0000 UTC m=+2197.206601126" lastFinishedPulling="2025-12-05 01:47:02.094559677 +0000 UTC m=+2197.433951976" observedRunningTime="2025-12-05 01:47:02.955817728 +0000 UTC m=+2198.295210037" watchObservedRunningTime="2025-12-05 01:47:02.96207736 +0000 UTC m=+2198.301469659" Dec 05 01:47:11 crc kubenswrapper[4665]: I1205 01:47:11.011709 4665 generic.go:334] "Generic (PLEG): container finished" podID="66505ea7-937f-4f07-b036-afca1adc368c" containerID="cf45c05d82229f07e48ca8c662b4438adcfe633c42d379791ccc850fea218d83" exitCode=0 Dec 05 01:47:11 crc kubenswrapper[4665]: I1205 01:47:11.011808 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8fk25" 
event={"ID":"66505ea7-937f-4f07-b036-afca1adc368c","Type":"ContainerDied","Data":"cf45c05d82229f07e48ca8c662b4438adcfe633c42d379791ccc850fea218d83"} Dec 05 01:47:12 crc kubenswrapper[4665]: I1205 01:47:12.379505 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8fk25" Dec 05 01:47:12 crc kubenswrapper[4665]: I1205 01:47:12.398681 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/66505ea7-937f-4f07-b036-afca1adc368c-ssh-key\") pod \"66505ea7-937f-4f07-b036-afca1adc368c\" (UID: \"66505ea7-937f-4f07-b036-afca1adc368c\") " Dec 05 01:47:12 crc kubenswrapper[4665]: I1205 01:47:12.398759 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xgqgv\" (UniqueName: \"kubernetes.io/projected/66505ea7-937f-4f07-b036-afca1adc368c-kube-api-access-xgqgv\") pod \"66505ea7-937f-4f07-b036-afca1adc368c\" (UID: \"66505ea7-937f-4f07-b036-afca1adc368c\") " Dec 05 01:47:12 crc kubenswrapper[4665]: I1205 01:47:12.398848 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/66505ea7-937f-4f07-b036-afca1adc368c-inventory\") pod \"66505ea7-937f-4f07-b036-afca1adc368c\" (UID: \"66505ea7-937f-4f07-b036-afca1adc368c\") " Dec 05 01:47:12 crc kubenswrapper[4665]: I1205 01:47:12.419165 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/66505ea7-937f-4f07-b036-afca1adc368c-kube-api-access-xgqgv" (OuterVolumeSpecName: "kube-api-access-xgqgv") pod "66505ea7-937f-4f07-b036-afca1adc368c" (UID: "66505ea7-937f-4f07-b036-afca1adc368c"). InnerVolumeSpecName "kube-api-access-xgqgv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:47:12 crc kubenswrapper[4665]: I1205 01:47:12.437682 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66505ea7-937f-4f07-b036-afca1adc368c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "66505ea7-937f-4f07-b036-afca1adc368c" (UID: "66505ea7-937f-4f07-b036-afca1adc368c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:47:12 crc kubenswrapper[4665]: I1205 01:47:12.454230 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/66505ea7-937f-4f07-b036-afca1adc368c-inventory" (OuterVolumeSpecName: "inventory") pod "66505ea7-937f-4f07-b036-afca1adc368c" (UID: "66505ea7-937f-4f07-b036-afca1adc368c"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:47:12 crc kubenswrapper[4665]: I1205 01:47:12.501802 4665 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/66505ea7-937f-4f07-b036-afca1adc368c-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 01:47:12 crc kubenswrapper[4665]: I1205 01:47:12.501841 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xgqgv\" (UniqueName: \"kubernetes.io/projected/66505ea7-937f-4f07-b036-afca1adc368c-kube-api-access-xgqgv\") on node \"crc\" DevicePath \"\"" Dec 05 01:47:12 crc kubenswrapper[4665]: I1205 01:47:12.501856 4665 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/66505ea7-937f-4f07-b036-afca1adc368c-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 01:47:13 crc kubenswrapper[4665]: I1205 01:47:13.030668 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8fk25" event={"ID":"66505ea7-937f-4f07-b036-afca1adc368c","Type":"ContainerDied","Data":"7ea983093eb5a6c3a39a48669d67eecfa711e6398315b06276e7a21322fea985"} Dec 05 01:47:13 crc kubenswrapper[4665]: I1205 01:47:13.030727 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-8fk25" Dec 05 01:47:13 crc kubenswrapper[4665]: I1205 01:47:13.030755 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7ea983093eb5a6c3a39a48669d67eecfa711e6398315b06276e7a21322fea985" Dec 05 01:47:13 crc kubenswrapper[4665]: I1205 01:47:13.123645 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77"] Dec 05 01:47:13 crc kubenswrapper[4665]: E1205 01:47:13.124168 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="66505ea7-937f-4f07-b036-afca1adc368c" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 05 01:47:13 crc kubenswrapper[4665]: I1205 01:47:13.124189 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="66505ea7-937f-4f07-b036-afca1adc368c" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 05 01:47:13 crc kubenswrapper[4665]: I1205 01:47:13.124434 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="66505ea7-937f-4f07-b036-afca1adc368c" containerName="run-os-edpm-deployment-openstack-edpm-ipam" Dec 05 01:47:13 crc kubenswrapper[4665]: I1205 01:47:13.125206 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77" Dec 05 01:47:13 crc kubenswrapper[4665]: I1205 01:47:13.127949 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 01:47:13 crc kubenswrapper[4665]: I1205 01:47:13.130665 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r7jw8" Dec 05 01:47:13 crc kubenswrapper[4665]: I1205 01:47:13.130770 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 01:47:13 crc kubenswrapper[4665]: I1205 01:47:13.130949 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 01:47:13 crc kubenswrapper[4665]: I1205 01:47:13.139105 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77"] Dec 05 01:47:13 crc kubenswrapper[4665]: I1205 01:47:13.213405 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4sbsr\" (UniqueName: \"kubernetes.io/projected/e27c2d20-292e-4f38-8fb9-8addf5cb5ebf-kube-api-access-4sbsr\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77\" (UID: \"e27c2d20-292e-4f38-8fb9-8addf5cb5ebf\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77" Dec 05 01:47:13 crc kubenswrapper[4665]: I1205 01:47:13.213466 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e27c2d20-292e-4f38-8fb9-8addf5cb5ebf-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77\" (UID: \"e27c2d20-292e-4f38-8fb9-8addf5cb5ebf\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77" Dec 05 01:47:13 crc kubenswrapper[4665]: I1205 01:47:13.213512 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e27c2d20-292e-4f38-8fb9-8addf5cb5ebf-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77\" (UID: \"e27c2d20-292e-4f38-8fb9-8addf5cb5ebf\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77" Dec 05 01:47:13 crc kubenswrapper[4665]: I1205 01:47:13.314878 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e27c2d20-292e-4f38-8fb9-8addf5cb5ebf-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77\" (UID: \"e27c2d20-292e-4f38-8fb9-8addf5cb5ebf\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77" Dec 05 01:47:13 crc kubenswrapper[4665]: I1205 01:47:13.315044 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4sbsr\" (UniqueName: \"kubernetes.io/projected/e27c2d20-292e-4f38-8fb9-8addf5cb5ebf-kube-api-access-4sbsr\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77\" (UID: \"e27c2d20-292e-4f38-8fb9-8addf5cb5ebf\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77" Dec 05 01:47:13 crc kubenswrapper[4665]: I1205 01:47:13.315063 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e27c2d20-292e-4f38-8fb9-8addf5cb5ebf-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77\" (UID: 
\"e27c2d20-292e-4f38-8fb9-8addf5cb5ebf\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77" Dec 05 01:47:13 crc kubenswrapper[4665]: I1205 01:47:13.319126 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e27c2d20-292e-4f38-8fb9-8addf5cb5ebf-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77\" (UID: \"e27c2d20-292e-4f38-8fb9-8addf5cb5ebf\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77" Dec 05 01:47:13 crc kubenswrapper[4665]: I1205 01:47:13.322929 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e27c2d20-292e-4f38-8fb9-8addf5cb5ebf-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77\" (UID: \"e27c2d20-292e-4f38-8fb9-8addf5cb5ebf\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77" Dec 05 01:47:13 crc kubenswrapper[4665]: I1205 01:47:13.329591 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4sbsr\" (UniqueName: \"kubernetes.io/projected/e27c2d20-292e-4f38-8fb9-8addf5cb5ebf-kube-api-access-4sbsr\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77\" (UID: \"e27c2d20-292e-4f38-8fb9-8addf5cb5ebf\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77" Dec 05 01:47:13 crc kubenswrapper[4665]: I1205 01:47:13.441004 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77" Dec 05 01:47:13 crc kubenswrapper[4665]: I1205 01:47:13.970698 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77"] Dec 05 01:47:14 crc kubenswrapper[4665]: I1205 01:47:14.042173 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77" event={"ID":"e27c2d20-292e-4f38-8fb9-8addf5cb5ebf","Type":"ContainerStarted","Data":"7819d06d0b210f236c31ffeed3112a81b97d33a6ba4c8dd3bdba9e6926963dec"} Dec 05 01:47:15 crc kubenswrapper[4665]: I1205 01:47:15.050481 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77" event={"ID":"e27c2d20-292e-4f38-8fb9-8addf5cb5ebf","Type":"ContainerStarted","Data":"bd03edb2b81f60b4b45c7dd7c521df0d234a446ba2a9ddd5f0b577aae76ccc7d"} Dec 05 01:47:15 crc kubenswrapper[4665]: I1205 01:47:15.074278 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77" podStartSLOduration=1.89577551 podStartE2EDuration="2.074259135s" podCreationTimestamp="2025-12-05 01:47:13 +0000 UTC" firstStartedPulling="2025-12-05 01:47:13.969404092 +0000 UTC m=+2209.308796391" lastFinishedPulling="2025-12-05 01:47:14.147887727 +0000 UTC m=+2209.487280016" observedRunningTime="2025-12-05 01:47:15.062798466 +0000 UTC m=+2210.402190765" watchObservedRunningTime="2025-12-05 01:47:15.074259135 +0000 UTC m=+2210.413651434" Dec 05 01:47:24 crc kubenswrapper[4665]: I1205 01:47:24.133164 4665 generic.go:334] "Generic (PLEG): container finished" podID="e27c2d20-292e-4f38-8fb9-8addf5cb5ebf" containerID="bd03edb2b81f60b4b45c7dd7c521df0d234a446ba2a9ddd5f0b577aae76ccc7d" exitCode=0 Dec 05 01:47:24 crc kubenswrapper[4665]: I1205 01:47:24.133217 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77" 
event={"ID":"e27c2d20-292e-4f38-8fb9-8addf5cb5ebf","Type":"ContainerDied","Data":"bd03edb2b81f60b4b45c7dd7c521df0d234a446ba2a9ddd5f0b577aae76ccc7d"} Dec 05 01:47:25 crc kubenswrapper[4665]: I1205 01:47:25.593991 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77" Dec 05 01:47:25 crc kubenswrapper[4665]: I1205 01:47:25.750541 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4sbsr\" (UniqueName: \"kubernetes.io/projected/e27c2d20-292e-4f38-8fb9-8addf5cb5ebf-kube-api-access-4sbsr\") pod \"e27c2d20-292e-4f38-8fb9-8addf5cb5ebf\" (UID: \"e27c2d20-292e-4f38-8fb9-8addf5cb5ebf\") " Dec 05 01:47:25 crc kubenswrapper[4665]: I1205 01:47:25.750733 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e27c2d20-292e-4f38-8fb9-8addf5cb5ebf-ssh-key\") pod \"e27c2d20-292e-4f38-8fb9-8addf5cb5ebf\" (UID: \"e27c2d20-292e-4f38-8fb9-8addf5cb5ebf\") " Dec 05 01:47:25 crc kubenswrapper[4665]: I1205 01:47:25.750817 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e27c2d20-292e-4f38-8fb9-8addf5cb5ebf-inventory\") pod \"e27c2d20-292e-4f38-8fb9-8addf5cb5ebf\" (UID: \"e27c2d20-292e-4f38-8fb9-8addf5cb5ebf\") " Dec 05 01:47:25 crc kubenswrapper[4665]: I1205 01:47:25.761762 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e27c2d20-292e-4f38-8fb9-8addf5cb5ebf-kube-api-access-4sbsr" (OuterVolumeSpecName: "kube-api-access-4sbsr") pod "e27c2d20-292e-4f38-8fb9-8addf5cb5ebf" (UID: "e27c2d20-292e-4f38-8fb9-8addf5cb5ebf"). InnerVolumeSpecName "kube-api-access-4sbsr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:47:25 crc kubenswrapper[4665]: I1205 01:47:25.778927 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e27c2d20-292e-4f38-8fb9-8addf5cb5ebf-inventory" (OuterVolumeSpecName: "inventory") pod "e27c2d20-292e-4f38-8fb9-8addf5cb5ebf" (UID: "e27c2d20-292e-4f38-8fb9-8addf5cb5ebf"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:47:25 crc kubenswrapper[4665]: I1205 01:47:25.781483 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e27c2d20-292e-4f38-8fb9-8addf5cb5ebf-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "e27c2d20-292e-4f38-8fb9-8addf5cb5ebf" (UID: "e27c2d20-292e-4f38-8fb9-8addf5cb5ebf"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:47:25 crc kubenswrapper[4665]: I1205 01:47:25.855305 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4sbsr\" (UniqueName: \"kubernetes.io/projected/e27c2d20-292e-4f38-8fb9-8addf5cb5ebf-kube-api-access-4sbsr\") on node \"crc\" DevicePath \"\"" Dec 05 01:47:25 crc kubenswrapper[4665]: I1205 01:47:25.855544 4665 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/e27c2d20-292e-4f38-8fb9-8addf5cb5ebf-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 01:47:25 crc kubenswrapper[4665]: I1205 01:47:25.855608 4665 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/e27c2d20-292e-4f38-8fb9-8addf5cb5ebf-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.153021 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77" event={"ID":"e27c2d20-292e-4f38-8fb9-8addf5cb5ebf","Type":"ContainerDied","Data":"7819d06d0b210f236c31ffeed3112a81b97d33a6ba4c8dd3bdba9e6926963dec"} Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.153115 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7819d06d0b210f236c31ffeed3112a81b97d33a6ba4c8dd3bdba9e6926963dec" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.153056 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.270980 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd"] Dec 05 01:47:26 crc kubenswrapper[4665]: E1205 01:47:26.272506 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e27c2d20-292e-4f38-8fb9-8addf5cb5ebf" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.272540 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="e27c2d20-292e-4f38-8fb9-8addf5cb5ebf" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.273216 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="e27c2d20-292e-4f38-8fb9-8addf5cb5ebf" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.283427 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.285435 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.287904 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.289498 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.289696 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.292625 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd"] Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.300611 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.300839 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.301069 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r7jw8" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.301228 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.367560 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.367614 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.367650 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.367688 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.367818 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.367883 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.367909 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.368141 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.368272 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.368311 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.368382 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.368427 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.368534 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fgdqd\" (UniqueName: \"kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-kube-api-access-fgdqd\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.368565 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.470713 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fgdqd\" (UniqueName: \"kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-kube-api-access-fgdqd\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.470769 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.470833 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.470860 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 
01:47:26.470885 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.470945 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.470970 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.471014 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.471031 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.471080 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.471127 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.471172 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-inventory\") pod 
\"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.471200 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.476458 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.477391 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.477444 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.479121 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.479341 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.479854 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 
01:47:26.480329 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.480914 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.481303 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.481622 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.482014 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.483272 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.484051 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.485606 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " 
pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.487723 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fgdqd\" (UniqueName: \"kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-kube-api-access-fgdqd\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:26 crc kubenswrapper[4665]: I1205 01:47:26.615383 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:47:27 crc kubenswrapper[4665]: I1205 01:47:27.186860 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd"] Dec 05 01:47:27 crc kubenswrapper[4665]: W1205 01:47:27.202469 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podcaadc290_7caf_4c1f_8a2e_4c2b275e572b.slice/crio-a83ae6a7c33732abd21fe13e161e7941f2847f4fbdc5b6cec7cdfe2e4767c167 WatchSource:0}: Error finding container a83ae6a7c33732abd21fe13e161e7941f2847f4fbdc5b6cec7cdfe2e4767c167: Status 404 returned error can't find the container with id a83ae6a7c33732abd21fe13e161e7941f2847f4fbdc5b6cec7cdfe2e4767c167 Dec 05 01:47:28 crc kubenswrapper[4665]: I1205 01:47:28.174151 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" event={"ID":"caadc290-7caf-4c1f-8a2e-4c2b275e572b","Type":"ContainerStarted","Data":"491e2ac7f73b08d1bee57a2ee326a31524b91bd9b5c2c9da5f3d9ab7f3624182"} Dec 05 01:47:28 crc kubenswrapper[4665]: I1205 01:47:28.174791 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" event={"ID":"caadc290-7caf-4c1f-8a2e-4c2b275e572b","Type":"ContainerStarted","Data":"a83ae6a7c33732abd21fe13e161e7941f2847f4fbdc5b6cec7cdfe2e4767c167"} Dec 05 01:47:28 crc kubenswrapper[4665]: I1205 01:47:28.208571 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" podStartSLOduration=2.029195668 podStartE2EDuration="2.208547275s" podCreationTimestamp="2025-12-05 01:47:26 +0000 UTC" firstStartedPulling="2025-12-05 01:47:27.206881214 +0000 UTC m=+2222.546273513" lastFinishedPulling="2025-12-05 01:47:27.386232821 +0000 UTC m=+2222.725625120" observedRunningTime="2025-12-05 01:47:28.199120955 +0000 UTC m=+2223.538513264" watchObservedRunningTime="2025-12-05 01:47:28.208547275 +0000 UTC m=+2223.547939584" Dec 05 01:48:10 crc kubenswrapper[4665]: I1205 01:48:10.532811 4665 generic.go:334] "Generic (PLEG): container finished" podID="caadc290-7caf-4c1f-8a2e-4c2b275e572b" containerID="491e2ac7f73b08d1bee57a2ee326a31524b91bd9b5c2c9da5f3d9ab7f3624182" exitCode=0 Dec 05 01:48:10 crc kubenswrapper[4665]: I1205 01:48:10.532901 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" event={"ID":"caadc290-7caf-4c1f-8a2e-4c2b275e572b","Type":"ContainerDied","Data":"491e2ac7f73b08d1bee57a2ee326a31524b91bd9b5c2c9da5f3d9ab7f3624182"} Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.001926 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.160546 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-ovn-combined-ca-bundle\") pod \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.160602 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-telemetry-combined-ca-bundle\") pod \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.160625 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.160686 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fgdqd\" (UniqueName: \"kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-kube-api-access-fgdqd\") pod \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.160724 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-nova-combined-ca-bundle\") pod \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.160754 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-repo-setup-combined-ca-bundle\") pod \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.160788 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-inventory\") pod \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.160811 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.160884 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-openstack-edpm-ipam-ovn-default-certs-0\") pod \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " 
Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.160977 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.161008 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-bootstrap-combined-ca-bundle\") pod \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.161028 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-libvirt-combined-ca-bundle\") pod \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.161048 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-neutron-metadata-combined-ca-bundle\") pod \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.161112 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-ssh-key\") pod \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\" (UID: \"caadc290-7caf-4c1f-8a2e-4c2b275e572b\") " Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.169355 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "caadc290-7caf-4c1f-8a2e-4c2b275e572b" (UID: "caadc290-7caf-4c1f-8a2e-4c2b275e572b"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.169418 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-kube-api-access-fgdqd" (OuterVolumeSpecName: "kube-api-access-fgdqd") pod "caadc290-7caf-4c1f-8a2e-4c2b275e572b" (UID: "caadc290-7caf-4c1f-8a2e-4c2b275e572b"). InnerVolumeSpecName "kube-api-access-fgdqd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.169472 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "caadc290-7caf-4c1f-8a2e-4c2b275e572b" (UID: "caadc290-7caf-4c1f-8a2e-4c2b275e572b"). InnerVolumeSpecName "nova-combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.169613 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "caadc290-7caf-4c1f-8a2e-4c2b275e572b" (UID: "caadc290-7caf-4c1f-8a2e-4c2b275e572b"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.171653 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "caadc290-7caf-4c1f-8a2e-4c2b275e572b" (UID: "caadc290-7caf-4c1f-8a2e-4c2b275e572b"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.172226 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "caadc290-7caf-4c1f-8a2e-4c2b275e572b" (UID: "caadc290-7caf-4c1f-8a2e-4c2b275e572b"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.173043 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "caadc290-7caf-4c1f-8a2e-4c2b275e572b" (UID: "caadc290-7caf-4c1f-8a2e-4c2b275e572b"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.173260 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "caadc290-7caf-4c1f-8a2e-4c2b275e572b" (UID: "caadc290-7caf-4c1f-8a2e-4c2b275e572b"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.174239 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "caadc290-7caf-4c1f-8a2e-4c2b275e572b" (UID: "caadc290-7caf-4c1f-8a2e-4c2b275e572b"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.175042 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "caadc290-7caf-4c1f-8a2e-4c2b275e572b" (UID: "caadc290-7caf-4c1f-8a2e-4c2b275e572b"). 
InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.178501 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "caadc290-7caf-4c1f-8a2e-4c2b275e572b" (UID: "caadc290-7caf-4c1f-8a2e-4c2b275e572b"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.187704 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "caadc290-7caf-4c1f-8a2e-4c2b275e572b" (UID: "caadc290-7caf-4c1f-8a2e-4c2b275e572b"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.194079 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-inventory" (OuterVolumeSpecName: "inventory") pod "caadc290-7caf-4c1f-8a2e-4c2b275e572b" (UID: "caadc290-7caf-4c1f-8a2e-4c2b275e572b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.195972 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "caadc290-7caf-4c1f-8a2e-4c2b275e572b" (UID: "caadc290-7caf-4c1f-8a2e-4c2b275e572b"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.263168 4665 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.263383 4665 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.263495 4665 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.264506 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fgdqd\" (UniqueName: \"kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-kube-api-access-fgdqd\") on node \"crc\" DevicePath \"\"" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.264567 4665 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.264706 4665 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.264815 4665 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.264947 4665 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.265054 4665 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.265126 4665 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/caadc290-7caf-4c1f-8a2e-4c2b275e572b-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.265201 4665 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.265258 4665 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.265337 4665 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.265405 4665 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/caadc290-7caf-4c1f-8a2e-4c2b275e572b-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.551869 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" event={"ID":"caadc290-7caf-4c1f-8a2e-4c2b275e572b","Type":"ContainerDied","Data":"a83ae6a7c33732abd21fe13e161e7941f2847f4fbdc5b6cec7cdfe2e4767c167"} Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.551905 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a83ae6a7c33732abd21fe13e161e7941f2847f4fbdc5b6cec7cdfe2e4767c167" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.552199 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.658889 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht"] Dec 05 01:48:12 crc kubenswrapper[4665]: E1205 01:48:12.659328 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="caadc290-7caf-4c1f-8a2e-4c2b275e572b" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.659346 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="caadc290-7caf-4c1f-8a2e-4c2b275e572b" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.659526 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="caadc290-7caf-4c1f-8a2e-4c2b275e572b" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.660141 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.665251 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.665447 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r7jw8" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.665950 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.665989 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.673102 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht"] Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.678810 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.773602 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21b83a7a-3186-47f2-851a-b65efe2348a8-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dxlht\" (UID: \"21b83a7a-3186-47f2-851a-b65efe2348a8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.773707 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21b83a7a-3186-47f2-851a-b65efe2348a8-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dxlht\" (UID: \"21b83a7a-3186-47f2-851a-b65efe2348a8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.773736 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/21b83a7a-3186-47f2-851a-b65efe2348a8-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dxlht\" (UID: \"21b83a7a-3186-47f2-851a-b65efe2348a8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.773786 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lhpz4\" (UniqueName: \"kubernetes.io/projected/21b83a7a-3186-47f2-851a-b65efe2348a8-kube-api-access-lhpz4\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dxlht\" (UID: \"21b83a7a-3186-47f2-851a-b65efe2348a8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.773826 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/21b83a7a-3186-47f2-851a-b65efe2348a8-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dxlht\" (UID: \"21b83a7a-3186-47f2-851a-b65efe2348a8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.875061 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lhpz4\" 
(UniqueName: \"kubernetes.io/projected/21b83a7a-3186-47f2-851a-b65efe2348a8-kube-api-access-lhpz4\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dxlht\" (UID: \"21b83a7a-3186-47f2-851a-b65efe2348a8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.875147 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/21b83a7a-3186-47f2-851a-b65efe2348a8-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dxlht\" (UID: \"21b83a7a-3186-47f2-851a-b65efe2348a8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.875185 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21b83a7a-3186-47f2-851a-b65efe2348a8-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dxlht\" (UID: \"21b83a7a-3186-47f2-851a-b65efe2348a8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.875281 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21b83a7a-3186-47f2-851a-b65efe2348a8-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dxlht\" (UID: \"21b83a7a-3186-47f2-851a-b65efe2348a8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.875330 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/21b83a7a-3186-47f2-851a-b65efe2348a8-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dxlht\" (UID: \"21b83a7a-3186-47f2-851a-b65efe2348a8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.876228 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/21b83a7a-3186-47f2-851a-b65efe2348a8-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dxlht\" (UID: \"21b83a7a-3186-47f2-851a-b65efe2348a8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.886025 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/21b83a7a-3186-47f2-851a-b65efe2348a8-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dxlht\" (UID: \"21b83a7a-3186-47f2-851a-b65efe2348a8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.886237 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21b83a7a-3186-47f2-851a-b65efe2348a8-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dxlht\" (UID: \"21b83a7a-3186-47f2-851a-b65efe2348a8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.886789 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21b83a7a-3186-47f2-851a-b65efe2348a8-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dxlht\" (UID: \"21b83a7a-3186-47f2-851a-b65efe2348a8\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.891638 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lhpz4\" (UniqueName: \"kubernetes.io/projected/21b83a7a-3186-47f2-851a-b65efe2348a8-kube-api-access-lhpz4\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-dxlht\" (UID: \"21b83a7a-3186-47f2-851a-b65efe2348a8\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht" Dec 05 01:48:12 crc kubenswrapper[4665]: I1205 01:48:12.978801 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht" Dec 05 01:48:13 crc kubenswrapper[4665]: I1205 01:48:13.519892 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht"] Dec 05 01:48:13 crc kubenswrapper[4665]: I1205 01:48:13.559415 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht" event={"ID":"21b83a7a-3186-47f2-851a-b65efe2348a8","Type":"ContainerStarted","Data":"4f5146e5d108bde1087b47a0f59fdc10e915f11a6f3c7d51cb29665e68fff835"} Dec 05 01:48:14 crc kubenswrapper[4665]: I1205 01:48:14.569572 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht" event={"ID":"21b83a7a-3186-47f2-851a-b65efe2348a8","Type":"ContainerStarted","Data":"fedc5e6c0479a08bfe2d84c801f034ab832020020d772d60c4afd215fc0b4a9a"} Dec 05 01:48:14 crc kubenswrapper[4665]: I1205 01:48:14.591254 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht" podStartSLOduration=2.412067298 podStartE2EDuration="2.591238341s" podCreationTimestamp="2025-12-05 01:48:12 +0000 UTC" firstStartedPulling="2025-12-05 01:48:13.526609507 +0000 UTC m=+2268.866001806" lastFinishedPulling="2025-12-05 01:48:13.70578055 +0000 UTC m=+2269.045172849" observedRunningTime="2025-12-05 01:48:14.590186175 +0000 UTC m=+2269.929578474" watchObservedRunningTime="2025-12-05 01:48:14.591238341 +0000 UTC m=+2269.930630640" Dec 05 01:49:14 crc kubenswrapper[4665]: I1205 01:49:14.922887 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:49:14 crc kubenswrapper[4665]: I1205 01:49:14.924534 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:49:27 crc kubenswrapper[4665]: I1205 01:49:27.191937 4665 generic.go:334] "Generic (PLEG): container finished" podID="21b83a7a-3186-47f2-851a-b65efe2348a8" containerID="fedc5e6c0479a08bfe2d84c801f034ab832020020d772d60c4afd215fc0b4a9a" exitCode=0 Dec 05 01:49:27 crc kubenswrapper[4665]: I1205 01:49:27.192474 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht" event={"ID":"21b83a7a-3186-47f2-851a-b65efe2348a8","Type":"ContainerDied","Data":"fedc5e6c0479a08bfe2d84c801f034ab832020020d772d60c4afd215fc0b4a9a"} Dec 05 01:49:28 crc kubenswrapper[4665]: 
I1205 01:49:28.660368 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht" Dec 05 01:49:28 crc kubenswrapper[4665]: I1205 01:49:28.682847 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/21b83a7a-3186-47f2-851a-b65efe2348a8-ssh-key\") pod \"21b83a7a-3186-47f2-851a-b65efe2348a8\" (UID: \"21b83a7a-3186-47f2-851a-b65efe2348a8\") " Dec 05 01:49:28 crc kubenswrapper[4665]: I1205 01:49:28.682892 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21b83a7a-3186-47f2-851a-b65efe2348a8-inventory\") pod \"21b83a7a-3186-47f2-851a-b65efe2348a8\" (UID: \"21b83a7a-3186-47f2-851a-b65efe2348a8\") " Dec 05 01:49:28 crc kubenswrapper[4665]: I1205 01:49:28.734474 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21b83a7a-3186-47f2-851a-b65efe2348a8-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "21b83a7a-3186-47f2-851a-b65efe2348a8" (UID: "21b83a7a-3186-47f2-851a-b65efe2348a8"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:49:28 crc kubenswrapper[4665]: I1205 01:49:28.750816 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21b83a7a-3186-47f2-851a-b65efe2348a8-inventory" (OuterVolumeSpecName: "inventory") pod "21b83a7a-3186-47f2-851a-b65efe2348a8" (UID: "21b83a7a-3186-47f2-851a-b65efe2348a8"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:49:28 crc kubenswrapper[4665]: I1205 01:49:28.784554 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lhpz4\" (UniqueName: \"kubernetes.io/projected/21b83a7a-3186-47f2-851a-b65efe2348a8-kube-api-access-lhpz4\") pod \"21b83a7a-3186-47f2-851a-b65efe2348a8\" (UID: \"21b83a7a-3186-47f2-851a-b65efe2348a8\") " Dec 05 01:49:28 crc kubenswrapper[4665]: I1205 01:49:28.784859 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21b83a7a-3186-47f2-851a-b65efe2348a8-ovn-combined-ca-bundle\") pod \"21b83a7a-3186-47f2-851a-b65efe2348a8\" (UID: \"21b83a7a-3186-47f2-851a-b65efe2348a8\") " Dec 05 01:49:28 crc kubenswrapper[4665]: I1205 01:49:28.784979 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/21b83a7a-3186-47f2-851a-b65efe2348a8-ovncontroller-config-0\") pod \"21b83a7a-3186-47f2-851a-b65efe2348a8\" (UID: \"21b83a7a-3186-47f2-851a-b65efe2348a8\") " Dec 05 01:49:28 crc kubenswrapper[4665]: I1205 01:49:28.785568 4665 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/21b83a7a-3186-47f2-851a-b65efe2348a8-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 01:49:28 crc kubenswrapper[4665]: I1205 01:49:28.785663 4665 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/21b83a7a-3186-47f2-851a-b65efe2348a8-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 01:49:28 crc kubenswrapper[4665]: I1205 01:49:28.787869 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21b83a7a-3186-47f2-851a-b65efe2348a8-kube-api-access-lhpz4" (OuterVolumeSpecName: 
"kube-api-access-lhpz4") pod "21b83a7a-3186-47f2-851a-b65efe2348a8" (UID: "21b83a7a-3186-47f2-851a-b65efe2348a8"). InnerVolumeSpecName "kube-api-access-lhpz4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:49:28 crc kubenswrapper[4665]: I1205 01:49:28.788559 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21b83a7a-3186-47f2-851a-b65efe2348a8-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "21b83a7a-3186-47f2-851a-b65efe2348a8" (UID: "21b83a7a-3186-47f2-851a-b65efe2348a8"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:49:28 crc kubenswrapper[4665]: I1205 01:49:28.807525 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21b83a7a-3186-47f2-851a-b65efe2348a8-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "21b83a7a-3186-47f2-851a-b65efe2348a8" (UID: "21b83a7a-3186-47f2-851a-b65efe2348a8"). InnerVolumeSpecName "ovncontroller-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:49:28 crc kubenswrapper[4665]: I1205 01:49:28.887006 4665 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/21b83a7a-3186-47f2-851a-b65efe2348a8-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 01:49:28 crc kubenswrapper[4665]: I1205 01:49:28.887045 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lhpz4\" (UniqueName: \"kubernetes.io/projected/21b83a7a-3186-47f2-851a-b65efe2348a8-kube-api-access-lhpz4\") on node \"crc\" DevicePath \"\"" Dec 05 01:49:28 crc kubenswrapper[4665]: I1205 01:49:28.887059 4665 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/21b83a7a-3186-47f2-851a-b65efe2348a8-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.211716 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht" event={"ID":"21b83a7a-3186-47f2-851a-b65efe2348a8","Type":"ContainerDied","Data":"4f5146e5d108bde1087b47a0f59fdc10e915f11a6f3c7d51cb29665e68fff835"} Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.211753 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4f5146e5d108bde1087b47a0f59fdc10e915f11a6f3c7d51cb29665e68fff835" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.211844 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-dxlht" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.307767 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl"] Dec 05 01:49:29 crc kubenswrapper[4665]: E1205 01:49:29.308238 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="21b83a7a-3186-47f2-851a-b65efe2348a8" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.308261 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="21b83a7a-3186-47f2-851a-b65efe2348a8" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.308510 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="21b83a7a-3186-47f2-851a-b65efe2348a8" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.309135 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.312625 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.313211 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.313612 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r7jw8" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.313761 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.314306 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.316677 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.320259 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl"] Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.397139 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl\" (UID: \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.397539 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl\" (UID: \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.397615 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl\" (UID: \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.397698 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl\" (UID: \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.397776 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24ltv\" (UniqueName: \"kubernetes.io/projected/76df2ad8-3e10-41ac-aa00-bea04feee0b9-kube-api-access-24ltv\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl\" (UID: \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.398032 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl\" (UID: \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.499079 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl\" (UID: \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.499320 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl\" (UID: \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.499526 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl\" (UID: \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.499640 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24ltv\" (UniqueName: \"kubernetes.io/projected/76df2ad8-3e10-41ac-aa00-bea04feee0b9-kube-api-access-24ltv\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl\" (UID: \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\") " 
pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.499839 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl\" (UID: \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.499994 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl\" (UID: \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.503775 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl\" (UID: \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.511683 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl\" (UID: \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.512468 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl\" (UID: \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.513922 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl\" (UID: \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.516107 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl\" (UID: \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.517177 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24ltv\" (UniqueName: 
\"kubernetes.io/projected/76df2ad8-3e10-41ac-aa00-bea04feee0b9-kube-api-access-24ltv\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl\" (UID: \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" Dec 05 01:49:29 crc kubenswrapper[4665]: I1205 01:49:29.625536 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" Dec 05 01:49:30 crc kubenswrapper[4665]: I1205 01:49:30.353732 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl"] Dec 05 01:49:31 crc kubenswrapper[4665]: I1205 01:49:31.228614 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" event={"ID":"76df2ad8-3e10-41ac-aa00-bea04feee0b9","Type":"ContainerStarted","Data":"bd41051ef5b46b4b072752614719757e80eee015d6a79c1ceaadea091130c5ab"} Dec 05 01:49:31 crc kubenswrapper[4665]: I1205 01:49:31.229271 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" event={"ID":"76df2ad8-3e10-41ac-aa00-bea04feee0b9","Type":"ContainerStarted","Data":"0a73943d98c00e8da53c7c21e2c77a4b255b96cfc485ea26011725765c12e3ea"} Dec 05 01:49:31 crc kubenswrapper[4665]: I1205 01:49:31.277513 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" podStartSLOduration=2.106969657 podStartE2EDuration="2.277492203s" podCreationTimestamp="2025-12-05 01:49:29 +0000 UTC" firstStartedPulling="2025-12-05 01:49:30.362392654 +0000 UTC m=+2345.701784953" lastFinishedPulling="2025-12-05 01:49:30.5329152 +0000 UTC m=+2345.872307499" observedRunningTime="2025-12-05 01:49:31.255051158 +0000 UTC m=+2346.594443467" watchObservedRunningTime="2025-12-05 01:49:31.277492203 +0000 UTC m=+2346.616884502" Dec 05 01:49:44 crc kubenswrapper[4665]: I1205 01:49:44.921936 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:49:44 crc kubenswrapper[4665]: I1205 01:49:44.922615 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:50:14 crc kubenswrapper[4665]: I1205 01:50:14.923448 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:50:14 crc kubenswrapper[4665]: I1205 01:50:14.923936 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:50:14 crc 
kubenswrapper[4665]: I1205 01:50:14.923989 4665 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 01:50:14 crc kubenswrapper[4665]: I1205 01:50:14.924785 4665 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca"} pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 01:50:14 crc kubenswrapper[4665]: I1205 01:50:14.924848 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" containerID="cri-o://2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" gracePeriod=600 Dec 05 01:50:15 crc kubenswrapper[4665]: E1205 01:50:15.043986 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:50:15 crc kubenswrapper[4665]: I1205 01:50:15.577176 4665 generic.go:334] "Generic (PLEG): container finished" podID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" exitCode=0 Dec 05 01:50:15 crc kubenswrapper[4665]: I1205 01:50:15.577222 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerDied","Data":"2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca"} Dec 05 01:50:15 crc kubenswrapper[4665]: I1205 01:50:15.577259 4665 scope.go:117] "RemoveContainer" containerID="5c570a9be3cb99220cfff24bbdba7c2a172ecd2885266f83ae0d281c3e6b2ef1" Dec 05 01:50:15 crc kubenswrapper[4665]: I1205 01:50:15.577960 4665 scope.go:117] "RemoveContainer" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" Dec 05 01:50:15 crc kubenswrapper[4665]: E1205 01:50:15.578321 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:50:28 crc kubenswrapper[4665]: I1205 01:50:28.894008 4665 scope.go:117] "RemoveContainer" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" Dec 05 01:50:28 crc kubenswrapper[4665]: E1205 01:50:28.894732 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:50:30 crc kubenswrapper[4665]: I1205 01:50:30.745784 4665 generic.go:334] "Generic (PLEG): container finished" podID="76df2ad8-3e10-41ac-aa00-bea04feee0b9" containerID="bd41051ef5b46b4b072752614719757e80eee015d6a79c1ceaadea091130c5ab" exitCode=0 Dec 05 01:50:30 crc kubenswrapper[4665]: I1205 01:50:30.745892 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" event={"ID":"76df2ad8-3e10-41ac-aa00-bea04feee0b9","Type":"ContainerDied","Data":"bd41051ef5b46b4b072752614719757e80eee015d6a79c1ceaadea091130c5ab"} Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.215001 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.354995 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-24ltv\" (UniqueName: \"kubernetes.io/projected/76df2ad8-3e10-41ac-aa00-bea04feee0b9-kube-api-access-24ltv\") pod \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\" (UID: \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\") " Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.355044 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-inventory\") pod \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\" (UID: \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\") " Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.355102 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-neutron-ovn-metadata-agent-neutron-config-0\") pod \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\" (UID: \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\") " Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.355148 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-neutron-metadata-combined-ca-bundle\") pod \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\" (UID: \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\") " Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.355203 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-nova-metadata-neutron-config-0\") pod \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\" (UID: \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\") " Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.355230 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-ssh-key\") pod \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\" (UID: \"76df2ad8-3e10-41ac-aa00-bea04feee0b9\") " Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.360534 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76df2ad8-3e10-41ac-aa00-bea04feee0b9-kube-api-access-24ltv" (OuterVolumeSpecName: "kube-api-access-24ltv") pod "76df2ad8-3e10-41ac-aa00-bea04feee0b9" (UID: "76df2ad8-3e10-41ac-aa00-bea04feee0b9"). 
InnerVolumeSpecName "kube-api-access-24ltv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.366275 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "76df2ad8-3e10-41ac-aa00-bea04feee0b9" (UID: "76df2ad8-3e10-41ac-aa00-bea04feee0b9"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.384717 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "76df2ad8-3e10-41ac-aa00-bea04feee0b9" (UID: "76df2ad8-3e10-41ac-aa00-bea04feee0b9"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.384791 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "76df2ad8-3e10-41ac-aa00-bea04feee0b9" (UID: "76df2ad8-3e10-41ac-aa00-bea04feee0b9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.387059 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-inventory" (OuterVolumeSpecName: "inventory") pod "76df2ad8-3e10-41ac-aa00-bea04feee0b9" (UID: "76df2ad8-3e10-41ac-aa00-bea04feee0b9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.389494 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "76df2ad8-3e10-41ac-aa00-bea04feee0b9" (UID: "76df2ad8-3e10-41ac-aa00-bea04feee0b9"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.458902 4665 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.458939 4665 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.458955 4665 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.458970 4665 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.458982 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-24ltv\" (UniqueName: \"kubernetes.io/projected/76df2ad8-3e10-41ac-aa00-bea04feee0b9-kube-api-access-24ltv\") on node \"crc\" DevicePath \"\"" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.458994 4665 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/76df2ad8-3e10-41ac-aa00-bea04feee0b9-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.777435 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" event={"ID":"76df2ad8-3e10-41ac-aa00-bea04feee0b9","Type":"ContainerDied","Data":"0a73943d98c00e8da53c7c21e2c77a4b255b96cfc485ea26011725765c12e3ea"} Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.777485 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a73943d98c00e8da53c7c21e2c77a4b255b96cfc485ea26011725765c12e3ea" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.777589 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.886198 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd"] Dec 05 01:50:32 crc kubenswrapper[4665]: E1205 01:50:32.886575 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76df2ad8-3e10-41ac-aa00-bea04feee0b9" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.886592 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="76df2ad8-3e10-41ac-aa00-bea04feee0b9" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.886805 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="76df2ad8-3e10-41ac-aa00-bea04feee0b9" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.887435 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.889107 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.889395 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.890402 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r7jw8" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.891514 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.900861 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.910229 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd"] Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.985657 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/386e08fe-2108-4139-af9d-94fbaa7b7b12-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd\" (UID: \"386e08fe-2108-4139-af9d-94fbaa7b7b12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.985928 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/386e08fe-2108-4139-af9d-94fbaa7b7b12-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd\" (UID: \"386e08fe-2108-4139-af9d-94fbaa7b7b12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.986053 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/386e08fe-2108-4139-af9d-94fbaa7b7b12-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd\" (UID: \"386e08fe-2108-4139-af9d-94fbaa7b7b12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.986173 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/386e08fe-2108-4139-af9d-94fbaa7b7b12-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd\" (UID: \"386e08fe-2108-4139-af9d-94fbaa7b7b12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd" Dec 05 01:50:32 crc kubenswrapper[4665]: I1205 01:50:32.986360 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zrfqd\" (UniqueName: \"kubernetes.io/projected/386e08fe-2108-4139-af9d-94fbaa7b7b12-kube-api-access-zrfqd\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd\" (UID: \"386e08fe-2108-4139-af9d-94fbaa7b7b12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd" Dec 05 01:50:33 crc kubenswrapper[4665]: I1205 01:50:33.089236 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-zrfqd\" (UniqueName: \"kubernetes.io/projected/386e08fe-2108-4139-af9d-94fbaa7b7b12-kube-api-access-zrfqd\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd\" (UID: \"386e08fe-2108-4139-af9d-94fbaa7b7b12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd" Dec 05 01:50:33 crc kubenswrapper[4665]: I1205 01:50:33.089419 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/386e08fe-2108-4139-af9d-94fbaa7b7b12-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd\" (UID: \"386e08fe-2108-4139-af9d-94fbaa7b7b12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd" Dec 05 01:50:33 crc kubenswrapper[4665]: I1205 01:50:33.089448 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/386e08fe-2108-4139-af9d-94fbaa7b7b12-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd\" (UID: \"386e08fe-2108-4139-af9d-94fbaa7b7b12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd" Dec 05 01:50:33 crc kubenswrapper[4665]: I1205 01:50:33.089494 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/386e08fe-2108-4139-af9d-94fbaa7b7b12-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd\" (UID: \"386e08fe-2108-4139-af9d-94fbaa7b7b12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd" Dec 05 01:50:33 crc kubenswrapper[4665]: I1205 01:50:33.089547 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/386e08fe-2108-4139-af9d-94fbaa7b7b12-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd\" (UID: \"386e08fe-2108-4139-af9d-94fbaa7b7b12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd" Dec 05 01:50:33 crc kubenswrapper[4665]: I1205 01:50:33.093038 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/386e08fe-2108-4139-af9d-94fbaa7b7b12-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd\" (UID: \"386e08fe-2108-4139-af9d-94fbaa7b7b12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd" Dec 05 01:50:33 crc kubenswrapper[4665]: I1205 01:50:33.093650 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/386e08fe-2108-4139-af9d-94fbaa7b7b12-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd\" (UID: \"386e08fe-2108-4139-af9d-94fbaa7b7b12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd" Dec 05 01:50:33 crc kubenswrapper[4665]: I1205 01:50:33.093981 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/386e08fe-2108-4139-af9d-94fbaa7b7b12-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd\" (UID: \"386e08fe-2108-4139-af9d-94fbaa7b7b12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd" Dec 05 01:50:33 crc kubenswrapper[4665]: I1205 01:50:33.094232 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/386e08fe-2108-4139-af9d-94fbaa7b7b12-libvirt-combined-ca-bundle\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd\" (UID: \"386e08fe-2108-4139-af9d-94fbaa7b7b12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd" Dec 05 01:50:33 crc kubenswrapper[4665]: I1205 01:50:33.112385 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zrfqd\" (UniqueName: \"kubernetes.io/projected/386e08fe-2108-4139-af9d-94fbaa7b7b12-kube-api-access-zrfqd\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd\" (UID: \"386e08fe-2108-4139-af9d-94fbaa7b7b12\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd" Dec 05 01:50:33 crc kubenswrapper[4665]: I1205 01:50:33.217273 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd" Dec 05 01:50:33 crc kubenswrapper[4665]: I1205 01:50:33.734116 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd"] Dec 05 01:50:33 crc kubenswrapper[4665]: I1205 01:50:33.785798 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd" event={"ID":"386e08fe-2108-4139-af9d-94fbaa7b7b12","Type":"ContainerStarted","Data":"80cdfed407ee4860fe16bf3b062ec377c61a2883fa190d2e0684a15961d09c8d"} Dec 05 01:50:34 crc kubenswrapper[4665]: I1205 01:50:34.798005 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd" event={"ID":"386e08fe-2108-4139-af9d-94fbaa7b7b12","Type":"ContainerStarted","Data":"5244b8e99bc2d5efe97b934c4b624a97f0f8a4e4a43c049de3a148195a600860"} Dec 05 01:50:34 crc kubenswrapper[4665]: I1205 01:50:34.822857 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd" podStartSLOduration=2.640248218 podStartE2EDuration="2.822827727s" podCreationTimestamp="2025-12-05 01:50:32 +0000 UTC" firstStartedPulling="2025-12-05 01:50:33.739950128 +0000 UTC m=+2409.079342427" lastFinishedPulling="2025-12-05 01:50:33.922529637 +0000 UTC m=+2409.261921936" observedRunningTime="2025-12-05 01:50:34.818534583 +0000 UTC m=+2410.157926882" watchObservedRunningTime="2025-12-05 01:50:34.822827727 +0000 UTC m=+2410.162220046" Dec 05 01:50:42 crc kubenswrapper[4665]: I1205 01:50:42.894481 4665 scope.go:117] "RemoveContainer" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" Dec 05 01:50:42 crc kubenswrapper[4665]: E1205 01:50:42.895448 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:50:57 crc kubenswrapper[4665]: I1205 01:50:57.893363 4665 scope.go:117] "RemoveContainer" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" Dec 05 01:50:57 crc kubenswrapper[4665]: E1205 01:50:57.894149 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:51:10 crc kubenswrapper[4665]: I1205 01:51:10.894026 4665 scope.go:117] "RemoveContainer" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" Dec 05 01:51:10 crc kubenswrapper[4665]: E1205 01:51:10.894800 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:51:25 crc kubenswrapper[4665]: I1205 01:51:25.893857 4665 scope.go:117] "RemoveContainer" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" Dec 05 01:51:25 crc kubenswrapper[4665]: E1205 01:51:25.894642 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:51:33 crc kubenswrapper[4665]: I1205 01:51:33.168855 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-l454z"] Dec 05 01:51:33 crc kubenswrapper[4665]: I1205 01:51:33.171896 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l454z" Dec 05 01:51:33 crc kubenswrapper[4665]: I1205 01:51:33.193053 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l454z"] Dec 05 01:51:33 crc kubenswrapper[4665]: I1205 01:51:33.345201 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5080a8a8-9b7d-4627-91bf-e3a87d411e72-catalog-content\") pod \"certified-operators-l454z\" (UID: \"5080a8a8-9b7d-4627-91bf-e3a87d411e72\") " pod="openshift-marketplace/certified-operators-l454z" Dec 05 01:51:33 crc kubenswrapper[4665]: I1205 01:51:33.345263 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ggmp4\" (UniqueName: \"kubernetes.io/projected/5080a8a8-9b7d-4627-91bf-e3a87d411e72-kube-api-access-ggmp4\") pod \"certified-operators-l454z\" (UID: \"5080a8a8-9b7d-4627-91bf-e3a87d411e72\") " pod="openshift-marketplace/certified-operators-l454z" Dec 05 01:51:33 crc kubenswrapper[4665]: I1205 01:51:33.345319 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5080a8a8-9b7d-4627-91bf-e3a87d411e72-utilities\") pod \"certified-operators-l454z\" (UID: \"5080a8a8-9b7d-4627-91bf-e3a87d411e72\") " pod="openshift-marketplace/certified-operators-l454z" Dec 05 01:51:33 crc kubenswrapper[4665]: I1205 01:51:33.447800 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5080a8a8-9b7d-4627-91bf-e3a87d411e72-catalog-content\") pod \"certified-operators-l454z\" (UID: 
\"5080a8a8-9b7d-4627-91bf-e3a87d411e72\") " pod="openshift-marketplace/certified-operators-l454z" Dec 05 01:51:33 crc kubenswrapper[4665]: I1205 01:51:33.447898 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ggmp4\" (UniqueName: \"kubernetes.io/projected/5080a8a8-9b7d-4627-91bf-e3a87d411e72-kube-api-access-ggmp4\") pod \"certified-operators-l454z\" (UID: \"5080a8a8-9b7d-4627-91bf-e3a87d411e72\") " pod="openshift-marketplace/certified-operators-l454z" Dec 05 01:51:33 crc kubenswrapper[4665]: I1205 01:51:33.447939 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5080a8a8-9b7d-4627-91bf-e3a87d411e72-utilities\") pod \"certified-operators-l454z\" (UID: \"5080a8a8-9b7d-4627-91bf-e3a87d411e72\") " pod="openshift-marketplace/certified-operators-l454z" Dec 05 01:51:33 crc kubenswrapper[4665]: I1205 01:51:33.448464 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5080a8a8-9b7d-4627-91bf-e3a87d411e72-catalog-content\") pod \"certified-operators-l454z\" (UID: \"5080a8a8-9b7d-4627-91bf-e3a87d411e72\") " pod="openshift-marketplace/certified-operators-l454z" Dec 05 01:51:33 crc kubenswrapper[4665]: I1205 01:51:33.448525 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5080a8a8-9b7d-4627-91bf-e3a87d411e72-utilities\") pod \"certified-operators-l454z\" (UID: \"5080a8a8-9b7d-4627-91bf-e3a87d411e72\") " pod="openshift-marketplace/certified-operators-l454z" Dec 05 01:51:33 crc kubenswrapper[4665]: I1205 01:51:33.468858 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ggmp4\" (UniqueName: \"kubernetes.io/projected/5080a8a8-9b7d-4627-91bf-e3a87d411e72-kube-api-access-ggmp4\") pod \"certified-operators-l454z\" (UID: \"5080a8a8-9b7d-4627-91bf-e3a87d411e72\") " pod="openshift-marketplace/certified-operators-l454z" Dec 05 01:51:33 crc kubenswrapper[4665]: I1205 01:51:33.496405 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-l454z" Dec 05 01:51:34 crc kubenswrapper[4665]: I1205 01:51:34.031001 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-l454z"] Dec 05 01:51:34 crc kubenswrapper[4665]: I1205 01:51:34.329777 4665 generic.go:334] "Generic (PLEG): container finished" podID="5080a8a8-9b7d-4627-91bf-e3a87d411e72" containerID="56164ba11070b3042803cbe59dedba44ba25a0a6972609cc366cf17fd4892a49" exitCode=0 Dec 05 01:51:34 crc kubenswrapper[4665]: I1205 01:51:34.329861 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l454z" event={"ID":"5080a8a8-9b7d-4627-91bf-e3a87d411e72","Type":"ContainerDied","Data":"56164ba11070b3042803cbe59dedba44ba25a0a6972609cc366cf17fd4892a49"} Dec 05 01:51:34 crc kubenswrapper[4665]: I1205 01:51:34.330015 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l454z" event={"ID":"5080a8a8-9b7d-4627-91bf-e3a87d411e72","Type":"ContainerStarted","Data":"cc3891416eba0901da59d6187702fd9d7bb9c1299150655e151cda94aed7f178"} Dec 05 01:51:34 crc kubenswrapper[4665]: I1205 01:51:34.332579 4665 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 01:51:35 crc kubenswrapper[4665]: I1205 01:51:35.354468 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l454z" event={"ID":"5080a8a8-9b7d-4627-91bf-e3a87d411e72","Type":"ContainerStarted","Data":"7d3a071f35f34be1193b43c0cf04caed24b381cd1bd183b774db72a65079eb90"} Dec 05 01:51:36 crc kubenswrapper[4665]: I1205 01:51:36.365071 4665 generic.go:334] "Generic (PLEG): container finished" podID="5080a8a8-9b7d-4627-91bf-e3a87d411e72" containerID="7d3a071f35f34be1193b43c0cf04caed24b381cd1bd183b774db72a65079eb90" exitCode=0 Dec 05 01:51:36 crc kubenswrapper[4665]: I1205 01:51:36.365126 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l454z" event={"ID":"5080a8a8-9b7d-4627-91bf-e3a87d411e72","Type":"ContainerDied","Data":"7d3a071f35f34be1193b43c0cf04caed24b381cd1bd183b774db72a65079eb90"} Dec 05 01:51:38 crc kubenswrapper[4665]: I1205 01:51:38.383093 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l454z" event={"ID":"5080a8a8-9b7d-4627-91bf-e3a87d411e72","Type":"ContainerStarted","Data":"934b7bda835bbabb94c6960548561f0e6e30dba2d83c1b5cb9903501b5c053ff"} Dec 05 01:51:38 crc kubenswrapper[4665]: I1205 01:51:38.402463 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-l454z" podStartSLOduration=2.041648489 podStartE2EDuration="5.402446706s" podCreationTimestamp="2025-12-05 01:51:33 +0000 UTC" firstStartedPulling="2025-12-05 01:51:34.332242952 +0000 UTC m=+2469.671635251" lastFinishedPulling="2025-12-05 01:51:37.693041169 +0000 UTC m=+2473.032433468" observedRunningTime="2025-12-05 01:51:38.39644938 +0000 UTC m=+2473.735841679" watchObservedRunningTime="2025-12-05 01:51:38.402446706 +0000 UTC m=+2473.741839005" Dec 05 01:51:38 crc kubenswrapper[4665]: I1205 01:51:38.894137 4665 scope.go:117] "RemoveContainer" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" Dec 05 01:51:38 crc kubenswrapper[4665]: E1205 01:51:38.894427 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:51:43 crc kubenswrapper[4665]: I1205 01:51:43.496653 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-l454z" Dec 05 01:51:43 crc kubenswrapper[4665]: I1205 01:51:43.497414 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-l454z" Dec 05 01:51:43 crc kubenswrapper[4665]: I1205 01:51:43.556727 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-l454z" Dec 05 01:51:44 crc kubenswrapper[4665]: I1205 01:51:44.477250 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-l454z" Dec 05 01:51:44 crc kubenswrapper[4665]: I1205 01:51:44.528876 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l454z"] Dec 05 01:51:46 crc kubenswrapper[4665]: I1205 01:51:46.448538 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-l454z" podUID="5080a8a8-9b7d-4627-91bf-e3a87d411e72" containerName="registry-server" containerID="cri-o://934b7bda835bbabb94c6960548561f0e6e30dba2d83c1b5cb9903501b5c053ff" gracePeriod=2 Dec 05 01:51:46 crc kubenswrapper[4665]: I1205 01:51:46.905203 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l454z" Dec 05 01:51:46 crc kubenswrapper[4665]: I1205 01:51:46.908704 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5080a8a8-9b7d-4627-91bf-e3a87d411e72-utilities\") pod \"5080a8a8-9b7d-4627-91bf-e3a87d411e72\" (UID: \"5080a8a8-9b7d-4627-91bf-e3a87d411e72\") " Dec 05 01:51:46 crc kubenswrapper[4665]: I1205 01:51:46.908743 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5080a8a8-9b7d-4627-91bf-e3a87d411e72-catalog-content\") pod \"5080a8a8-9b7d-4627-91bf-e3a87d411e72\" (UID: \"5080a8a8-9b7d-4627-91bf-e3a87d411e72\") " Dec 05 01:51:46 crc kubenswrapper[4665]: I1205 01:51:46.908831 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ggmp4\" (UniqueName: \"kubernetes.io/projected/5080a8a8-9b7d-4627-91bf-e3a87d411e72-kube-api-access-ggmp4\") pod \"5080a8a8-9b7d-4627-91bf-e3a87d411e72\" (UID: \"5080a8a8-9b7d-4627-91bf-e3a87d411e72\") " Dec 05 01:51:46 crc kubenswrapper[4665]: I1205 01:51:46.909875 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5080a8a8-9b7d-4627-91bf-e3a87d411e72-utilities" (OuterVolumeSpecName: "utilities") pod "5080a8a8-9b7d-4627-91bf-e3a87d411e72" (UID: "5080a8a8-9b7d-4627-91bf-e3a87d411e72"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:51:46 crc kubenswrapper[4665]: I1205 01:51:46.914803 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5080a8a8-9b7d-4627-91bf-e3a87d411e72-kube-api-access-ggmp4" (OuterVolumeSpecName: "kube-api-access-ggmp4") pod "5080a8a8-9b7d-4627-91bf-e3a87d411e72" (UID: "5080a8a8-9b7d-4627-91bf-e3a87d411e72"). InnerVolumeSpecName "kube-api-access-ggmp4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:51:46 crc kubenswrapper[4665]: I1205 01:51:46.967086 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5080a8a8-9b7d-4627-91bf-e3a87d411e72-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5080a8a8-9b7d-4627-91bf-e3a87d411e72" (UID: "5080a8a8-9b7d-4627-91bf-e3a87d411e72"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.011454 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5080a8a8-9b7d-4627-91bf-e3a87d411e72-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.011496 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5080a8a8-9b7d-4627-91bf-e3a87d411e72-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.011512 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ggmp4\" (UniqueName: \"kubernetes.io/projected/5080a8a8-9b7d-4627-91bf-e3a87d411e72-kube-api-access-ggmp4\") on node \"crc\" DevicePath \"\"" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.228367 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-p6t4b"] Dec 05 01:51:47 crc kubenswrapper[4665]: E1205 01:51:47.228896 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5080a8a8-9b7d-4627-91bf-e3a87d411e72" containerName="extract-content" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.228921 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="5080a8a8-9b7d-4627-91bf-e3a87d411e72" containerName="extract-content" Dec 05 01:51:47 crc kubenswrapper[4665]: E1205 01:51:47.228948 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5080a8a8-9b7d-4627-91bf-e3a87d411e72" containerName="extract-utilities" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.228960 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="5080a8a8-9b7d-4627-91bf-e3a87d411e72" containerName="extract-utilities" Dec 05 01:51:47 crc kubenswrapper[4665]: E1205 01:51:47.228975 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5080a8a8-9b7d-4627-91bf-e3a87d411e72" containerName="registry-server" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.228983 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="5080a8a8-9b7d-4627-91bf-e3a87d411e72" containerName="registry-server" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.229241 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="5080a8a8-9b7d-4627-91bf-e3a87d411e72" containerName="registry-server" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.230996 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p6t4b" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.239006 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p6t4b"] Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.316323 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/765538ff-266f-42bb-9e9c-5b46895831b1-utilities\") pod \"redhat-marketplace-p6t4b\" (UID: \"765538ff-266f-42bb-9e9c-5b46895831b1\") " pod="openshift-marketplace/redhat-marketplace-p6t4b" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.316422 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/765538ff-266f-42bb-9e9c-5b46895831b1-catalog-content\") pod \"redhat-marketplace-p6t4b\" (UID: \"765538ff-266f-42bb-9e9c-5b46895831b1\") " pod="openshift-marketplace/redhat-marketplace-p6t4b" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.316467 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4h4hd\" (UniqueName: \"kubernetes.io/projected/765538ff-266f-42bb-9e9c-5b46895831b1-kube-api-access-4h4hd\") pod \"redhat-marketplace-p6t4b\" (UID: \"765538ff-266f-42bb-9e9c-5b46895831b1\") " pod="openshift-marketplace/redhat-marketplace-p6t4b" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.417663 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/765538ff-266f-42bb-9e9c-5b46895831b1-utilities\") pod \"redhat-marketplace-p6t4b\" (UID: \"765538ff-266f-42bb-9e9c-5b46895831b1\") " pod="openshift-marketplace/redhat-marketplace-p6t4b" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.417739 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/765538ff-266f-42bb-9e9c-5b46895831b1-catalog-content\") pod \"redhat-marketplace-p6t4b\" (UID: \"765538ff-266f-42bb-9e9c-5b46895831b1\") " pod="openshift-marketplace/redhat-marketplace-p6t4b" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.417782 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4h4hd\" (UniqueName: \"kubernetes.io/projected/765538ff-266f-42bb-9e9c-5b46895831b1-kube-api-access-4h4hd\") pod \"redhat-marketplace-p6t4b\" (UID: \"765538ff-266f-42bb-9e9c-5b46895831b1\") " pod="openshift-marketplace/redhat-marketplace-p6t4b" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.418178 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/765538ff-266f-42bb-9e9c-5b46895831b1-utilities\") pod \"redhat-marketplace-p6t4b\" (UID: \"765538ff-266f-42bb-9e9c-5b46895831b1\") " pod="openshift-marketplace/redhat-marketplace-p6t4b" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.418195 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/765538ff-266f-42bb-9e9c-5b46895831b1-catalog-content\") pod \"redhat-marketplace-p6t4b\" (UID: \"765538ff-266f-42bb-9e9c-5b46895831b1\") " pod="openshift-marketplace/redhat-marketplace-p6t4b" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.434028 4665 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-4h4hd\" (UniqueName: \"kubernetes.io/projected/765538ff-266f-42bb-9e9c-5b46895831b1-kube-api-access-4h4hd\") pod \"redhat-marketplace-p6t4b\" (UID: \"765538ff-266f-42bb-9e9c-5b46895831b1\") " pod="openshift-marketplace/redhat-marketplace-p6t4b" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.461060 4665 generic.go:334] "Generic (PLEG): container finished" podID="5080a8a8-9b7d-4627-91bf-e3a87d411e72" containerID="934b7bda835bbabb94c6960548561f0e6e30dba2d83c1b5cb9903501b5c053ff" exitCode=0 Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.461106 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l454z" event={"ID":"5080a8a8-9b7d-4627-91bf-e3a87d411e72","Type":"ContainerDied","Data":"934b7bda835bbabb94c6960548561f0e6e30dba2d83c1b5cb9903501b5c053ff"} Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.461136 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-l454z" event={"ID":"5080a8a8-9b7d-4627-91bf-e3a87d411e72","Type":"ContainerDied","Data":"cc3891416eba0901da59d6187702fd9d7bb9c1299150655e151cda94aed7f178"} Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.461157 4665 scope.go:117] "RemoveContainer" containerID="934b7bda835bbabb94c6960548561f0e6e30dba2d83c1b5cb9903501b5c053ff" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.461379 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-l454z" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.501500 4665 scope.go:117] "RemoveContainer" containerID="7d3a071f35f34be1193b43c0cf04caed24b381cd1bd183b774db72a65079eb90" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.507026 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-l454z"] Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.517708 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-l454z"] Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.520479 4665 scope.go:117] "RemoveContainer" containerID="56164ba11070b3042803cbe59dedba44ba25a0a6972609cc366cf17fd4892a49" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.536933 4665 scope.go:117] "RemoveContainer" containerID="934b7bda835bbabb94c6960548561f0e6e30dba2d83c1b5cb9903501b5c053ff" Dec 05 01:51:47 crc kubenswrapper[4665]: E1205 01:51:47.537369 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"934b7bda835bbabb94c6960548561f0e6e30dba2d83c1b5cb9903501b5c053ff\": container with ID starting with 934b7bda835bbabb94c6960548561f0e6e30dba2d83c1b5cb9903501b5c053ff not found: ID does not exist" containerID="934b7bda835bbabb94c6960548561f0e6e30dba2d83c1b5cb9903501b5c053ff" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.537413 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"934b7bda835bbabb94c6960548561f0e6e30dba2d83c1b5cb9903501b5c053ff"} err="failed to get container status \"934b7bda835bbabb94c6960548561f0e6e30dba2d83c1b5cb9903501b5c053ff\": rpc error: code = NotFound desc = could not find container \"934b7bda835bbabb94c6960548561f0e6e30dba2d83c1b5cb9903501b5c053ff\": container with ID starting with 934b7bda835bbabb94c6960548561f0e6e30dba2d83c1b5cb9903501b5c053ff not found: ID does not exist" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 
01:51:47.537440 4665 scope.go:117] "RemoveContainer" containerID="7d3a071f35f34be1193b43c0cf04caed24b381cd1bd183b774db72a65079eb90" Dec 05 01:51:47 crc kubenswrapper[4665]: E1205 01:51:47.537784 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d3a071f35f34be1193b43c0cf04caed24b381cd1bd183b774db72a65079eb90\": container with ID starting with 7d3a071f35f34be1193b43c0cf04caed24b381cd1bd183b774db72a65079eb90 not found: ID does not exist" containerID="7d3a071f35f34be1193b43c0cf04caed24b381cd1bd183b774db72a65079eb90" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.537819 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d3a071f35f34be1193b43c0cf04caed24b381cd1bd183b774db72a65079eb90"} err="failed to get container status \"7d3a071f35f34be1193b43c0cf04caed24b381cd1bd183b774db72a65079eb90\": rpc error: code = NotFound desc = could not find container \"7d3a071f35f34be1193b43c0cf04caed24b381cd1bd183b774db72a65079eb90\": container with ID starting with 7d3a071f35f34be1193b43c0cf04caed24b381cd1bd183b774db72a65079eb90 not found: ID does not exist" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.537842 4665 scope.go:117] "RemoveContainer" containerID="56164ba11070b3042803cbe59dedba44ba25a0a6972609cc366cf17fd4892a49" Dec 05 01:51:47 crc kubenswrapper[4665]: E1205 01:51:47.538105 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56164ba11070b3042803cbe59dedba44ba25a0a6972609cc366cf17fd4892a49\": container with ID starting with 56164ba11070b3042803cbe59dedba44ba25a0a6972609cc366cf17fd4892a49 not found: ID does not exist" containerID="56164ba11070b3042803cbe59dedba44ba25a0a6972609cc366cf17fd4892a49" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.538137 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56164ba11070b3042803cbe59dedba44ba25a0a6972609cc366cf17fd4892a49"} err="failed to get container status \"56164ba11070b3042803cbe59dedba44ba25a0a6972609cc366cf17fd4892a49\": rpc error: code = NotFound desc = could not find container \"56164ba11070b3042803cbe59dedba44ba25a0a6972609cc366cf17fd4892a49\": container with ID starting with 56164ba11070b3042803cbe59dedba44ba25a0a6972609cc366cf17fd4892a49 not found: ID does not exist" Dec 05 01:51:47 crc kubenswrapper[4665]: I1205 01:51:47.608752 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p6t4b" Dec 05 01:51:48 crc kubenswrapper[4665]: W1205 01:51:48.068683 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod765538ff_266f_42bb_9e9c_5b46895831b1.slice/crio-3fd4eaba895df2f2134f4ce3d7a7cbd5af5ef4361d7558a45cc1142f8728699b WatchSource:0}: Error finding container 3fd4eaba895df2f2134f4ce3d7a7cbd5af5ef4361d7558a45cc1142f8728699b: Status 404 returned error can't find the container with id 3fd4eaba895df2f2134f4ce3d7a7cbd5af5ef4361d7558a45cc1142f8728699b Dec 05 01:51:48 crc kubenswrapper[4665]: I1205 01:51:48.070718 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-p6t4b"] Dec 05 01:51:48 crc kubenswrapper[4665]: I1205 01:51:48.474489 4665 generic.go:334] "Generic (PLEG): container finished" podID="765538ff-266f-42bb-9e9c-5b46895831b1" containerID="9c83c477d2b47c0c6f7b8ee6bbb26b6c72d72653a3aa5f430ca9ec556cd1fdf4" exitCode=0 Dec 05 01:51:48 crc kubenswrapper[4665]: I1205 01:51:48.474567 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p6t4b" event={"ID":"765538ff-266f-42bb-9e9c-5b46895831b1","Type":"ContainerDied","Data":"9c83c477d2b47c0c6f7b8ee6bbb26b6c72d72653a3aa5f430ca9ec556cd1fdf4"} Dec 05 01:51:48 crc kubenswrapper[4665]: I1205 01:51:48.474836 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p6t4b" event={"ID":"765538ff-266f-42bb-9e9c-5b46895831b1","Type":"ContainerStarted","Data":"3fd4eaba895df2f2134f4ce3d7a7cbd5af5ef4361d7558a45cc1142f8728699b"} Dec 05 01:51:48 crc kubenswrapper[4665]: I1205 01:51:48.908155 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5080a8a8-9b7d-4627-91bf-e3a87d411e72" path="/var/lib/kubelet/pods/5080a8a8-9b7d-4627-91bf-e3a87d411e72/volumes" Dec 05 01:51:49 crc kubenswrapper[4665]: I1205 01:51:49.486862 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p6t4b" event={"ID":"765538ff-266f-42bb-9e9c-5b46895831b1","Type":"ContainerStarted","Data":"1f9b23dd2824507bd928cfe67047e60c1d065252c4d3ec6704940493fce95074"} Dec 05 01:51:50 crc kubenswrapper[4665]: I1205 01:51:50.495366 4665 generic.go:334] "Generic (PLEG): container finished" podID="765538ff-266f-42bb-9e9c-5b46895831b1" containerID="1f9b23dd2824507bd928cfe67047e60c1d065252c4d3ec6704940493fce95074" exitCode=0 Dec 05 01:51:50 crc kubenswrapper[4665]: I1205 01:51:50.495457 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p6t4b" event={"ID":"765538ff-266f-42bb-9e9c-5b46895831b1","Type":"ContainerDied","Data":"1f9b23dd2824507bd928cfe67047e60c1d065252c4d3ec6704940493fce95074"} Dec 05 01:51:51 crc kubenswrapper[4665]: I1205 01:51:51.508823 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p6t4b" event={"ID":"765538ff-266f-42bb-9e9c-5b46895831b1","Type":"ContainerStarted","Data":"be526389f2ff842a6575a8393da30e2a7ee5768438b81f7fc5d6198f05264cb8"} Dec 05 01:51:51 crc kubenswrapper[4665]: I1205 01:51:51.532028 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-p6t4b" podStartSLOduration=2.028901223 podStartE2EDuration="4.532005587s" podCreationTimestamp="2025-12-05 01:51:47 +0000 UTC" firstStartedPulling="2025-12-05 01:51:48.477329355 +0000 UTC m=+2483.816721654" 
lastFinishedPulling="2025-12-05 01:51:50.980433719 +0000 UTC m=+2486.319826018" observedRunningTime="2025-12-05 01:51:51.525735635 +0000 UTC m=+2486.865127944" watchObservedRunningTime="2025-12-05 01:51:51.532005587 +0000 UTC m=+2486.871397886" Dec 05 01:51:52 crc kubenswrapper[4665]: I1205 01:51:52.894625 4665 scope.go:117] "RemoveContainer" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" Dec 05 01:51:52 crc kubenswrapper[4665]: E1205 01:51:52.894955 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:51:57 crc kubenswrapper[4665]: I1205 01:51:57.609280 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-p6t4b" Dec 05 01:51:57 crc kubenswrapper[4665]: I1205 01:51:57.611214 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-p6t4b" Dec 05 01:51:57 crc kubenswrapper[4665]: I1205 01:51:57.657859 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-p6t4b" Dec 05 01:51:58 crc kubenswrapper[4665]: I1205 01:51:58.618870 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-p6t4b" Dec 05 01:51:58 crc kubenswrapper[4665]: I1205 01:51:58.670739 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p6t4b"] Dec 05 01:52:00 crc kubenswrapper[4665]: I1205 01:52:00.584505 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-p6t4b" podUID="765538ff-266f-42bb-9e9c-5b46895831b1" containerName="registry-server" containerID="cri-o://be526389f2ff842a6575a8393da30e2a7ee5768438b81f7fc5d6198f05264cb8" gracePeriod=2 Dec 05 01:52:01 crc kubenswrapper[4665]: I1205 01:52:01.030765 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p6t4b" Dec 05 01:52:01 crc kubenswrapper[4665]: I1205 01:52:01.183803 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4h4hd\" (UniqueName: \"kubernetes.io/projected/765538ff-266f-42bb-9e9c-5b46895831b1-kube-api-access-4h4hd\") pod \"765538ff-266f-42bb-9e9c-5b46895831b1\" (UID: \"765538ff-266f-42bb-9e9c-5b46895831b1\") " Dec 05 01:52:01 crc kubenswrapper[4665]: I1205 01:52:01.184162 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/765538ff-266f-42bb-9e9c-5b46895831b1-catalog-content\") pod \"765538ff-266f-42bb-9e9c-5b46895831b1\" (UID: \"765538ff-266f-42bb-9e9c-5b46895831b1\") " Dec 05 01:52:01 crc kubenswrapper[4665]: I1205 01:52:01.184252 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/765538ff-266f-42bb-9e9c-5b46895831b1-utilities\") pod \"765538ff-266f-42bb-9e9c-5b46895831b1\" (UID: \"765538ff-266f-42bb-9e9c-5b46895831b1\") " Dec 05 01:52:01 crc kubenswrapper[4665]: I1205 01:52:01.185750 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/765538ff-266f-42bb-9e9c-5b46895831b1-utilities" (OuterVolumeSpecName: "utilities") pod "765538ff-266f-42bb-9e9c-5b46895831b1" (UID: "765538ff-266f-42bb-9e9c-5b46895831b1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:52:01 crc kubenswrapper[4665]: I1205 01:52:01.196617 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/765538ff-266f-42bb-9e9c-5b46895831b1-kube-api-access-4h4hd" (OuterVolumeSpecName: "kube-api-access-4h4hd") pod "765538ff-266f-42bb-9e9c-5b46895831b1" (UID: "765538ff-266f-42bb-9e9c-5b46895831b1"). InnerVolumeSpecName "kube-api-access-4h4hd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:52:01 crc kubenswrapper[4665]: I1205 01:52:01.202937 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/765538ff-266f-42bb-9e9c-5b46895831b1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "765538ff-266f-42bb-9e9c-5b46895831b1" (UID: "765538ff-266f-42bb-9e9c-5b46895831b1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:52:01 crc kubenswrapper[4665]: I1205 01:52:01.286873 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/765538ff-266f-42bb-9e9c-5b46895831b1-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 01:52:01 crc kubenswrapper[4665]: I1205 01:52:01.286905 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4h4hd\" (UniqueName: \"kubernetes.io/projected/765538ff-266f-42bb-9e9c-5b46895831b1-kube-api-access-4h4hd\") on node \"crc\" DevicePath \"\"" Dec 05 01:52:01 crc kubenswrapper[4665]: I1205 01:52:01.286918 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/765538ff-266f-42bb-9e9c-5b46895831b1-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 01:52:01 crc kubenswrapper[4665]: I1205 01:52:01.594356 4665 generic.go:334] "Generic (PLEG): container finished" podID="765538ff-266f-42bb-9e9c-5b46895831b1" containerID="be526389f2ff842a6575a8393da30e2a7ee5768438b81f7fc5d6198f05264cb8" exitCode=0 Dec 05 01:52:01 crc kubenswrapper[4665]: I1205 01:52:01.594400 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p6t4b" event={"ID":"765538ff-266f-42bb-9e9c-5b46895831b1","Type":"ContainerDied","Data":"be526389f2ff842a6575a8393da30e2a7ee5768438b81f7fc5d6198f05264cb8"} Dec 05 01:52:01 crc kubenswrapper[4665]: I1205 01:52:01.594428 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-p6t4b" event={"ID":"765538ff-266f-42bb-9e9c-5b46895831b1","Type":"ContainerDied","Data":"3fd4eaba895df2f2134f4ce3d7a7cbd5af5ef4361d7558a45cc1142f8728699b"} Dec 05 01:52:01 crc kubenswrapper[4665]: I1205 01:52:01.594450 4665 scope.go:117] "RemoveContainer" containerID="be526389f2ff842a6575a8393da30e2a7ee5768438b81f7fc5d6198f05264cb8" Dec 05 01:52:01 crc kubenswrapper[4665]: I1205 01:52:01.595483 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-p6t4b" Dec 05 01:52:01 crc kubenswrapper[4665]: I1205 01:52:01.614414 4665 scope.go:117] "RemoveContainer" containerID="1f9b23dd2824507bd928cfe67047e60c1d065252c4d3ec6704940493fce95074" Dec 05 01:52:01 crc kubenswrapper[4665]: I1205 01:52:01.649314 4665 scope.go:117] "RemoveContainer" containerID="9c83c477d2b47c0c6f7b8ee6bbb26b6c72d72653a3aa5f430ca9ec556cd1fdf4" Dec 05 01:52:01 crc kubenswrapper[4665]: I1205 01:52:01.653388 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-p6t4b"] Dec 05 01:52:01 crc kubenswrapper[4665]: I1205 01:52:01.664233 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-p6t4b"] Dec 05 01:52:01 crc kubenswrapper[4665]: I1205 01:52:01.702196 4665 scope.go:117] "RemoveContainer" containerID="be526389f2ff842a6575a8393da30e2a7ee5768438b81f7fc5d6198f05264cb8" Dec 05 01:52:01 crc kubenswrapper[4665]: E1205 01:52:01.702542 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"be526389f2ff842a6575a8393da30e2a7ee5768438b81f7fc5d6198f05264cb8\": container with ID starting with be526389f2ff842a6575a8393da30e2a7ee5768438b81f7fc5d6198f05264cb8 not found: ID does not exist" containerID="be526389f2ff842a6575a8393da30e2a7ee5768438b81f7fc5d6198f05264cb8" Dec 05 01:52:01 crc kubenswrapper[4665]: I1205 01:52:01.702611 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"be526389f2ff842a6575a8393da30e2a7ee5768438b81f7fc5d6198f05264cb8"} err="failed to get container status \"be526389f2ff842a6575a8393da30e2a7ee5768438b81f7fc5d6198f05264cb8\": rpc error: code = NotFound desc = could not find container \"be526389f2ff842a6575a8393da30e2a7ee5768438b81f7fc5d6198f05264cb8\": container with ID starting with be526389f2ff842a6575a8393da30e2a7ee5768438b81f7fc5d6198f05264cb8 not found: ID does not exist" Dec 05 01:52:01 crc kubenswrapper[4665]: I1205 01:52:01.702636 4665 scope.go:117] "RemoveContainer" containerID="1f9b23dd2824507bd928cfe67047e60c1d065252c4d3ec6704940493fce95074" Dec 05 01:52:01 crc kubenswrapper[4665]: E1205 01:52:01.702854 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f9b23dd2824507bd928cfe67047e60c1d065252c4d3ec6704940493fce95074\": container with ID starting with 1f9b23dd2824507bd928cfe67047e60c1d065252c4d3ec6704940493fce95074 not found: ID does not exist" containerID="1f9b23dd2824507bd928cfe67047e60c1d065252c4d3ec6704940493fce95074" Dec 05 01:52:01 crc kubenswrapper[4665]: I1205 01:52:01.702878 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f9b23dd2824507bd928cfe67047e60c1d065252c4d3ec6704940493fce95074"} err="failed to get container status \"1f9b23dd2824507bd928cfe67047e60c1d065252c4d3ec6704940493fce95074\": rpc error: code = NotFound desc = could not find container \"1f9b23dd2824507bd928cfe67047e60c1d065252c4d3ec6704940493fce95074\": container with ID starting with 1f9b23dd2824507bd928cfe67047e60c1d065252c4d3ec6704940493fce95074 not found: ID does not exist" Dec 05 01:52:01 crc kubenswrapper[4665]: I1205 01:52:01.702892 4665 scope.go:117] "RemoveContainer" containerID="9c83c477d2b47c0c6f7b8ee6bbb26b6c72d72653a3aa5f430ca9ec556cd1fdf4" Dec 05 01:52:01 crc kubenswrapper[4665]: E1205 01:52:01.703105 4665 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"9c83c477d2b47c0c6f7b8ee6bbb26b6c72d72653a3aa5f430ca9ec556cd1fdf4\": container with ID starting with 9c83c477d2b47c0c6f7b8ee6bbb26b6c72d72653a3aa5f430ca9ec556cd1fdf4 not found: ID does not exist" containerID="9c83c477d2b47c0c6f7b8ee6bbb26b6c72d72653a3aa5f430ca9ec556cd1fdf4" Dec 05 01:52:01 crc kubenswrapper[4665]: I1205 01:52:01.703126 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9c83c477d2b47c0c6f7b8ee6bbb26b6c72d72653a3aa5f430ca9ec556cd1fdf4"} err="failed to get container status \"9c83c477d2b47c0c6f7b8ee6bbb26b6c72d72653a3aa5f430ca9ec556cd1fdf4\": rpc error: code = NotFound desc = could not find container \"9c83c477d2b47c0c6f7b8ee6bbb26b6c72d72653a3aa5f430ca9ec556cd1fdf4\": container with ID starting with 9c83c477d2b47c0c6f7b8ee6bbb26b6c72d72653a3aa5f430ca9ec556cd1fdf4 not found: ID does not exist" Dec 05 01:52:02 crc kubenswrapper[4665]: I1205 01:52:02.902922 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="765538ff-266f-42bb-9e9c-5b46895831b1" path="/var/lib/kubelet/pods/765538ff-266f-42bb-9e9c-5b46895831b1/volumes" Dec 05 01:52:03 crc kubenswrapper[4665]: I1205 01:52:03.894158 4665 scope.go:117] "RemoveContainer" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" Dec 05 01:52:03 crc kubenswrapper[4665]: E1205 01:52:03.895082 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:52:16 crc kubenswrapper[4665]: I1205 01:52:16.893715 4665 scope.go:117] "RemoveContainer" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" Dec 05 01:52:16 crc kubenswrapper[4665]: E1205 01:52:16.894801 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:52:25 crc kubenswrapper[4665]: I1205 01:52:25.771089 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gfkqz"] Dec 05 01:52:25 crc kubenswrapper[4665]: E1205 01:52:25.772325 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="765538ff-266f-42bb-9e9c-5b46895831b1" containerName="extract-utilities" Dec 05 01:52:25 crc kubenswrapper[4665]: I1205 01:52:25.772343 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="765538ff-266f-42bb-9e9c-5b46895831b1" containerName="extract-utilities" Dec 05 01:52:25 crc kubenswrapper[4665]: E1205 01:52:25.772361 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="765538ff-266f-42bb-9e9c-5b46895831b1" containerName="extract-content" Dec 05 01:52:25 crc kubenswrapper[4665]: I1205 01:52:25.772369 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="765538ff-266f-42bb-9e9c-5b46895831b1" containerName="extract-content" Dec 05 01:52:25 crc kubenswrapper[4665]: E1205 
01:52:25.772391 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="765538ff-266f-42bb-9e9c-5b46895831b1" containerName="registry-server" Dec 05 01:52:25 crc kubenswrapper[4665]: I1205 01:52:25.772400 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="765538ff-266f-42bb-9e9c-5b46895831b1" containerName="registry-server" Dec 05 01:52:25 crc kubenswrapper[4665]: I1205 01:52:25.772590 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="765538ff-266f-42bb-9e9c-5b46895831b1" containerName="registry-server" Dec 05 01:52:25 crc kubenswrapper[4665]: I1205 01:52:25.774208 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gfkqz" Dec 05 01:52:25 crc kubenswrapper[4665]: I1205 01:52:25.785833 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gfkqz"] Dec 05 01:52:25 crc kubenswrapper[4665]: I1205 01:52:25.859648 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a02f9a1-3f10-44dc-87dc-b9c8783116ea-catalog-content\") pod \"community-operators-gfkqz\" (UID: \"8a02f9a1-3f10-44dc-87dc-b9c8783116ea\") " pod="openshift-marketplace/community-operators-gfkqz" Dec 05 01:52:25 crc kubenswrapper[4665]: I1205 01:52:25.859705 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nh4vc\" (UniqueName: \"kubernetes.io/projected/8a02f9a1-3f10-44dc-87dc-b9c8783116ea-kube-api-access-nh4vc\") pod \"community-operators-gfkqz\" (UID: \"8a02f9a1-3f10-44dc-87dc-b9c8783116ea\") " pod="openshift-marketplace/community-operators-gfkqz" Dec 05 01:52:25 crc kubenswrapper[4665]: I1205 01:52:25.859775 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a02f9a1-3f10-44dc-87dc-b9c8783116ea-utilities\") pod \"community-operators-gfkqz\" (UID: \"8a02f9a1-3f10-44dc-87dc-b9c8783116ea\") " pod="openshift-marketplace/community-operators-gfkqz" Dec 05 01:52:25 crc kubenswrapper[4665]: I1205 01:52:25.961522 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a02f9a1-3f10-44dc-87dc-b9c8783116ea-catalog-content\") pod \"community-operators-gfkqz\" (UID: \"8a02f9a1-3f10-44dc-87dc-b9c8783116ea\") " pod="openshift-marketplace/community-operators-gfkqz" Dec 05 01:52:25 crc kubenswrapper[4665]: I1205 01:52:25.961801 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nh4vc\" (UniqueName: \"kubernetes.io/projected/8a02f9a1-3f10-44dc-87dc-b9c8783116ea-kube-api-access-nh4vc\") pod \"community-operators-gfkqz\" (UID: \"8a02f9a1-3f10-44dc-87dc-b9c8783116ea\") " pod="openshift-marketplace/community-operators-gfkqz" Dec 05 01:52:25 crc kubenswrapper[4665]: I1205 01:52:25.961954 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a02f9a1-3f10-44dc-87dc-b9c8783116ea-utilities\") pod \"community-operators-gfkqz\" (UID: \"8a02f9a1-3f10-44dc-87dc-b9c8783116ea\") " pod="openshift-marketplace/community-operators-gfkqz" Dec 05 01:52:25 crc kubenswrapper[4665]: I1205 01:52:25.962092 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/8a02f9a1-3f10-44dc-87dc-b9c8783116ea-catalog-content\") pod \"community-operators-gfkqz\" (UID: \"8a02f9a1-3f10-44dc-87dc-b9c8783116ea\") " pod="openshift-marketplace/community-operators-gfkqz" Dec 05 01:52:25 crc kubenswrapper[4665]: I1205 01:52:25.962394 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a02f9a1-3f10-44dc-87dc-b9c8783116ea-utilities\") pod \"community-operators-gfkqz\" (UID: \"8a02f9a1-3f10-44dc-87dc-b9c8783116ea\") " pod="openshift-marketplace/community-operators-gfkqz" Dec 05 01:52:25 crc kubenswrapper[4665]: I1205 01:52:25.985669 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nh4vc\" (UniqueName: \"kubernetes.io/projected/8a02f9a1-3f10-44dc-87dc-b9c8783116ea-kube-api-access-nh4vc\") pod \"community-operators-gfkqz\" (UID: \"8a02f9a1-3f10-44dc-87dc-b9c8783116ea\") " pod="openshift-marketplace/community-operators-gfkqz" Dec 05 01:52:26 crc kubenswrapper[4665]: I1205 01:52:26.089355 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gfkqz" Dec 05 01:52:26 crc kubenswrapper[4665]: I1205 01:52:26.671614 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gfkqz"] Dec 05 01:52:26 crc kubenswrapper[4665]: I1205 01:52:26.840216 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gfkqz" event={"ID":"8a02f9a1-3f10-44dc-87dc-b9c8783116ea","Type":"ContainerStarted","Data":"d25e926c654a02fd62dab6cd11f334feb29150a886acd54c83774d8cf6bb1d5b"} Dec 05 01:52:27 crc kubenswrapper[4665]: I1205 01:52:27.849164 4665 generic.go:334] "Generic (PLEG): container finished" podID="8a02f9a1-3f10-44dc-87dc-b9c8783116ea" containerID="fdf417e4812a7935a15f1d1d3d97b8a160a956c1cf040499884b65aa2cd14df5" exitCode=0 Dec 05 01:52:27 crc kubenswrapper[4665]: I1205 01:52:27.849255 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gfkqz" event={"ID":"8a02f9a1-3f10-44dc-87dc-b9c8783116ea","Type":"ContainerDied","Data":"fdf417e4812a7935a15f1d1d3d97b8a160a956c1cf040499884b65aa2cd14df5"} Dec 05 01:52:27 crc kubenswrapper[4665]: I1205 01:52:27.893757 4665 scope.go:117] "RemoveContainer" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" Dec 05 01:52:27 crc kubenswrapper[4665]: E1205 01:52:27.894021 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:52:28 crc kubenswrapper[4665]: I1205 01:52:28.858591 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gfkqz" event={"ID":"8a02f9a1-3f10-44dc-87dc-b9c8783116ea","Type":"ContainerStarted","Data":"00e727d3616f8a0998d8bada790bab683a837d5d73da473feeedc7faf4795fe5"} Dec 05 01:52:29 crc kubenswrapper[4665]: I1205 01:52:29.874093 4665 generic.go:334] "Generic (PLEG): container finished" podID="8a02f9a1-3f10-44dc-87dc-b9c8783116ea" containerID="00e727d3616f8a0998d8bada790bab683a837d5d73da473feeedc7faf4795fe5" exitCode=0 Dec 05 01:52:29 crc 
kubenswrapper[4665]: I1205 01:52:29.874177 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gfkqz" event={"ID":"8a02f9a1-3f10-44dc-87dc-b9c8783116ea","Type":"ContainerDied","Data":"00e727d3616f8a0998d8bada790bab683a837d5d73da473feeedc7faf4795fe5"} Dec 05 01:52:30 crc kubenswrapper[4665]: I1205 01:52:30.887041 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gfkqz" event={"ID":"8a02f9a1-3f10-44dc-87dc-b9c8783116ea","Type":"ContainerStarted","Data":"5cc966a8113e5d49ace1d5ffb0820dff3903df729857cfe152cb3961ad8d29a5"} Dec 05 01:52:30 crc kubenswrapper[4665]: I1205 01:52:30.910424 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gfkqz" podStartSLOduration=3.412286849 podStartE2EDuration="5.910401451s" podCreationTimestamp="2025-12-05 01:52:25 +0000 UTC" firstStartedPulling="2025-12-05 01:52:27.851068516 +0000 UTC m=+2523.190460805" lastFinishedPulling="2025-12-05 01:52:30.349183108 +0000 UTC m=+2525.688575407" observedRunningTime="2025-12-05 01:52:30.906698791 +0000 UTC m=+2526.246091100" watchObservedRunningTime="2025-12-05 01:52:30.910401451 +0000 UTC m=+2526.249793760" Dec 05 01:52:36 crc kubenswrapper[4665]: I1205 01:52:36.089632 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gfkqz" Dec 05 01:52:36 crc kubenswrapper[4665]: I1205 01:52:36.090128 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gfkqz" Dec 05 01:52:36 crc kubenswrapper[4665]: I1205 01:52:36.152112 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gfkqz" Dec 05 01:52:37 crc kubenswrapper[4665]: I1205 01:52:37.004482 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gfkqz" Dec 05 01:52:37 crc kubenswrapper[4665]: I1205 01:52:37.052814 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gfkqz"] Dec 05 01:52:38 crc kubenswrapper[4665]: I1205 01:52:38.953541 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gfkqz" podUID="8a02f9a1-3f10-44dc-87dc-b9c8783116ea" containerName="registry-server" containerID="cri-o://5cc966a8113e5d49ace1d5ffb0820dff3903df729857cfe152cb3961ad8d29a5" gracePeriod=2 Dec 05 01:52:40 crc kubenswrapper[4665]: I1205 01:52:40.016080 4665 generic.go:334] "Generic (PLEG): container finished" podID="8a02f9a1-3f10-44dc-87dc-b9c8783116ea" containerID="5cc966a8113e5d49ace1d5ffb0820dff3903df729857cfe152cb3961ad8d29a5" exitCode=0 Dec 05 01:52:40 crc kubenswrapper[4665]: I1205 01:52:40.016238 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gfkqz" event={"ID":"8a02f9a1-3f10-44dc-87dc-b9c8783116ea","Type":"ContainerDied","Data":"5cc966a8113e5d49ace1d5ffb0820dff3903df729857cfe152cb3961ad8d29a5"} Dec 05 01:52:40 crc kubenswrapper[4665]: I1205 01:52:40.336763 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gfkqz" Dec 05 01:52:40 crc kubenswrapper[4665]: I1205 01:52:40.366766 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nh4vc\" (UniqueName: \"kubernetes.io/projected/8a02f9a1-3f10-44dc-87dc-b9c8783116ea-kube-api-access-nh4vc\") pod \"8a02f9a1-3f10-44dc-87dc-b9c8783116ea\" (UID: \"8a02f9a1-3f10-44dc-87dc-b9c8783116ea\") " Dec 05 01:52:40 crc kubenswrapper[4665]: I1205 01:52:40.382527 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8a02f9a1-3f10-44dc-87dc-b9c8783116ea-kube-api-access-nh4vc" (OuterVolumeSpecName: "kube-api-access-nh4vc") pod "8a02f9a1-3f10-44dc-87dc-b9c8783116ea" (UID: "8a02f9a1-3f10-44dc-87dc-b9c8783116ea"). InnerVolumeSpecName "kube-api-access-nh4vc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:52:40 crc kubenswrapper[4665]: I1205 01:52:40.468571 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a02f9a1-3f10-44dc-87dc-b9c8783116ea-catalog-content\") pod \"8a02f9a1-3f10-44dc-87dc-b9c8783116ea\" (UID: \"8a02f9a1-3f10-44dc-87dc-b9c8783116ea\") " Dec 05 01:52:40 crc kubenswrapper[4665]: I1205 01:52:40.469108 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a02f9a1-3f10-44dc-87dc-b9c8783116ea-utilities\") pod \"8a02f9a1-3f10-44dc-87dc-b9c8783116ea\" (UID: \"8a02f9a1-3f10-44dc-87dc-b9c8783116ea\") " Dec 05 01:52:40 crc kubenswrapper[4665]: I1205 01:52:40.469983 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nh4vc\" (UniqueName: \"kubernetes.io/projected/8a02f9a1-3f10-44dc-87dc-b9c8783116ea-kube-api-access-nh4vc\") on node \"crc\" DevicePath \"\"" Dec 05 01:52:40 crc kubenswrapper[4665]: I1205 01:52:40.470035 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a02f9a1-3f10-44dc-87dc-b9c8783116ea-utilities" (OuterVolumeSpecName: "utilities") pod "8a02f9a1-3f10-44dc-87dc-b9c8783116ea" (UID: "8a02f9a1-3f10-44dc-87dc-b9c8783116ea"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:52:40 crc kubenswrapper[4665]: I1205 01:52:40.515983 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8a02f9a1-3f10-44dc-87dc-b9c8783116ea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8a02f9a1-3f10-44dc-87dc-b9c8783116ea" (UID: "8a02f9a1-3f10-44dc-87dc-b9c8783116ea"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 01:52:40 crc kubenswrapper[4665]: I1205 01:52:40.571487 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8a02f9a1-3f10-44dc-87dc-b9c8783116ea-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 01:52:40 crc kubenswrapper[4665]: I1205 01:52:40.571734 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8a02f9a1-3f10-44dc-87dc-b9c8783116ea-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 01:52:40 crc kubenswrapper[4665]: I1205 01:52:40.894152 4665 scope.go:117] "RemoveContainer" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" Dec 05 01:52:40 crc kubenswrapper[4665]: E1205 01:52:40.894931 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:52:41 crc kubenswrapper[4665]: I1205 01:52:41.031313 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gfkqz" event={"ID":"8a02f9a1-3f10-44dc-87dc-b9c8783116ea","Type":"ContainerDied","Data":"d25e926c654a02fd62dab6cd11f334feb29150a886acd54c83774d8cf6bb1d5b"} Dec 05 01:52:41 crc kubenswrapper[4665]: I1205 01:52:41.031366 4665 scope.go:117] "RemoveContainer" containerID="5cc966a8113e5d49ace1d5ffb0820dff3903df729857cfe152cb3961ad8d29a5" Dec 05 01:52:41 crc kubenswrapper[4665]: I1205 01:52:41.031507 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gfkqz" Dec 05 01:52:41 crc kubenswrapper[4665]: I1205 01:52:41.059511 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gfkqz"] Dec 05 01:52:41 crc kubenswrapper[4665]: I1205 01:52:41.070255 4665 scope.go:117] "RemoveContainer" containerID="00e727d3616f8a0998d8bada790bab683a837d5d73da473feeedc7faf4795fe5" Dec 05 01:52:41 crc kubenswrapper[4665]: I1205 01:52:41.071764 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gfkqz"] Dec 05 01:52:41 crc kubenswrapper[4665]: I1205 01:52:41.102211 4665 scope.go:117] "RemoveContainer" containerID="fdf417e4812a7935a15f1d1d3d97b8a160a956c1cf040499884b65aa2cd14df5" Dec 05 01:52:42 crc kubenswrapper[4665]: I1205 01:52:42.909532 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8a02f9a1-3f10-44dc-87dc-b9c8783116ea" path="/var/lib/kubelet/pods/8a02f9a1-3f10-44dc-87dc-b9c8783116ea/volumes" Dec 05 01:52:53 crc kubenswrapper[4665]: I1205 01:52:53.893861 4665 scope.go:117] "RemoveContainer" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" Dec 05 01:52:53 crc kubenswrapper[4665]: E1205 01:52:53.895337 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:53:07 crc kubenswrapper[4665]: I1205 01:53:07.893435 4665 scope.go:117] "RemoveContainer" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" Dec 05 01:53:07 crc kubenswrapper[4665]: E1205 01:53:07.894649 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:53:21 crc kubenswrapper[4665]: I1205 01:53:21.893487 4665 scope.go:117] "RemoveContainer" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" Dec 05 01:53:21 crc kubenswrapper[4665]: E1205 01:53:21.894152 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:53:36 crc kubenswrapper[4665]: I1205 01:53:36.893850 4665 scope.go:117] "RemoveContainer" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" Dec 05 01:53:36 crc kubenswrapper[4665]: E1205 01:53:36.894883 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:53:47 crc kubenswrapper[4665]: I1205 01:53:47.894174 4665 scope.go:117] "RemoveContainer" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" Dec 05 01:53:47 crc kubenswrapper[4665]: E1205 01:53:47.894980 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:54:00 crc kubenswrapper[4665]: I1205 01:54:00.893771 4665 scope.go:117] "RemoveContainer" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" Dec 05 01:54:00 crc kubenswrapper[4665]: E1205 01:54:00.894431 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:54:12 crc kubenswrapper[4665]: I1205 01:54:12.893396 4665 scope.go:117] "RemoveContainer" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" Dec 05 01:54:12 crc kubenswrapper[4665]: E1205 01:54:12.894120 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:54:26 crc kubenswrapper[4665]: I1205 01:54:26.893426 4665 scope.go:117] "RemoveContainer" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" Dec 05 01:54:26 crc kubenswrapper[4665]: E1205 01:54:26.894181 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:54:38 crc kubenswrapper[4665]: I1205 01:54:38.894070 4665 scope.go:117] "RemoveContainer" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" Dec 05 01:54:38 crc kubenswrapper[4665]: E1205 01:54:38.894900 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" 
podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:54:52 crc kubenswrapper[4665]: I1205 01:54:52.894833 4665 scope.go:117] "RemoveContainer" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" Dec 05 01:54:52 crc kubenswrapper[4665]: E1205 01:54:52.896958 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:55:04 crc kubenswrapper[4665]: I1205 01:55:04.898516 4665 scope.go:117] "RemoveContainer" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" Dec 05 01:55:04 crc kubenswrapper[4665]: E1205 01:55:04.899364 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 01:55:11 crc kubenswrapper[4665]: I1205 01:55:11.381707 4665 generic.go:334] "Generic (PLEG): container finished" podID="386e08fe-2108-4139-af9d-94fbaa7b7b12" containerID="5244b8e99bc2d5efe97b934c4b624a97f0f8a4e4a43c049de3a148195a600860" exitCode=0 Dec 05 01:55:11 crc kubenswrapper[4665]: I1205 01:55:11.381780 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd" event={"ID":"386e08fe-2108-4139-af9d-94fbaa7b7b12","Type":"ContainerDied","Data":"5244b8e99bc2d5efe97b934c4b624a97f0f8a4e4a43c049de3a148195a600860"} Dec 05 01:55:12 crc kubenswrapper[4665]: I1205 01:55:12.837259 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd" Dec 05 01:55:12 crc kubenswrapper[4665]: I1205 01:55:12.938634 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/386e08fe-2108-4139-af9d-94fbaa7b7b12-libvirt-secret-0\") pod \"386e08fe-2108-4139-af9d-94fbaa7b7b12\" (UID: \"386e08fe-2108-4139-af9d-94fbaa7b7b12\") " Dec 05 01:55:12 crc kubenswrapper[4665]: I1205 01:55:12.938812 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/386e08fe-2108-4139-af9d-94fbaa7b7b12-inventory\") pod \"386e08fe-2108-4139-af9d-94fbaa7b7b12\" (UID: \"386e08fe-2108-4139-af9d-94fbaa7b7b12\") " Dec 05 01:55:12 crc kubenswrapper[4665]: I1205 01:55:12.938846 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zrfqd\" (UniqueName: \"kubernetes.io/projected/386e08fe-2108-4139-af9d-94fbaa7b7b12-kube-api-access-zrfqd\") pod \"386e08fe-2108-4139-af9d-94fbaa7b7b12\" (UID: \"386e08fe-2108-4139-af9d-94fbaa7b7b12\") " Dec 05 01:55:12 crc kubenswrapper[4665]: I1205 01:55:12.938910 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/386e08fe-2108-4139-af9d-94fbaa7b7b12-libvirt-combined-ca-bundle\") pod \"386e08fe-2108-4139-af9d-94fbaa7b7b12\" (UID: \"386e08fe-2108-4139-af9d-94fbaa7b7b12\") " Dec 05 01:55:12 crc kubenswrapper[4665]: I1205 01:55:12.938959 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/386e08fe-2108-4139-af9d-94fbaa7b7b12-ssh-key\") pod \"386e08fe-2108-4139-af9d-94fbaa7b7b12\" (UID: \"386e08fe-2108-4139-af9d-94fbaa7b7b12\") " Dec 05 01:55:12 crc kubenswrapper[4665]: I1205 01:55:12.944370 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/386e08fe-2108-4139-af9d-94fbaa7b7b12-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "386e08fe-2108-4139-af9d-94fbaa7b7b12" (UID: "386e08fe-2108-4139-af9d-94fbaa7b7b12"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:55:12 crc kubenswrapper[4665]: I1205 01:55:12.952536 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/386e08fe-2108-4139-af9d-94fbaa7b7b12-kube-api-access-zrfqd" (OuterVolumeSpecName: "kube-api-access-zrfqd") pod "386e08fe-2108-4139-af9d-94fbaa7b7b12" (UID: "386e08fe-2108-4139-af9d-94fbaa7b7b12"). InnerVolumeSpecName "kube-api-access-zrfqd". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 01:55:12 crc kubenswrapper[4665]: I1205 01:55:12.968441 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/386e08fe-2108-4139-af9d-94fbaa7b7b12-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "386e08fe-2108-4139-af9d-94fbaa7b7b12" (UID: "386e08fe-2108-4139-af9d-94fbaa7b7b12"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:55:12 crc kubenswrapper[4665]: I1205 01:55:12.976622 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/386e08fe-2108-4139-af9d-94fbaa7b7b12-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "386e08fe-2108-4139-af9d-94fbaa7b7b12" (UID: "386e08fe-2108-4139-af9d-94fbaa7b7b12"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:55:12 crc kubenswrapper[4665]: I1205 01:55:12.985253 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/386e08fe-2108-4139-af9d-94fbaa7b7b12-inventory" (OuterVolumeSpecName: "inventory") pod "386e08fe-2108-4139-af9d-94fbaa7b7b12" (UID: "386e08fe-2108-4139-af9d-94fbaa7b7b12"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.041577 4665 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/386e08fe-2108-4139-af9d-94fbaa7b7b12-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.041629 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zrfqd\" (UniqueName: \"kubernetes.io/projected/386e08fe-2108-4139-af9d-94fbaa7b7b12-kube-api-access-zrfqd\") on node \"crc\" DevicePath \"\"" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.041646 4665 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/386e08fe-2108-4139-af9d-94fbaa7b7b12-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.041659 4665 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/386e08fe-2108-4139-af9d-94fbaa7b7b12-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.041669 4665 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/386e08fe-2108-4139-af9d-94fbaa7b7b12-libvirt-secret-0\") on node \"crc\" DevicePath \"\"" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.402695 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd" event={"ID":"386e08fe-2108-4139-af9d-94fbaa7b7b12","Type":"ContainerDied","Data":"80cdfed407ee4860fe16bf3b062ec377c61a2883fa190d2e0684a15961d09c8d"} Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.402734 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="80cdfed407ee4860fe16bf3b062ec377c61a2883fa190d2e0684a15961d09c8d" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.402790 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.505581 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8"] Dec 05 01:55:13 crc kubenswrapper[4665]: E1205 01:55:13.505939 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a02f9a1-3f10-44dc-87dc-b9c8783116ea" containerName="extract-content" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.505955 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a02f9a1-3f10-44dc-87dc-b9c8783116ea" containerName="extract-content" Dec 05 01:55:13 crc kubenswrapper[4665]: E1205 01:55:13.505975 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a02f9a1-3f10-44dc-87dc-b9c8783116ea" containerName="extract-utilities" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.505982 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a02f9a1-3f10-44dc-87dc-b9c8783116ea" containerName="extract-utilities" Dec 05 01:55:13 crc kubenswrapper[4665]: E1205 01:55:13.505998 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="386e08fe-2108-4139-af9d-94fbaa7b7b12" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.506008 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="386e08fe-2108-4139-af9d-94fbaa7b7b12" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 01:55:13 crc kubenswrapper[4665]: E1205 01:55:13.506017 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8a02f9a1-3f10-44dc-87dc-b9c8783116ea" containerName="registry-server" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.506024 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="8a02f9a1-3f10-44dc-87dc-b9c8783116ea" containerName="registry-server" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.506209 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="386e08fe-2108-4139-af9d-94fbaa7b7b12" containerName="libvirt-edpm-deployment-openstack-edpm-ipam" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.506239 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="8a02f9a1-3f10-44dc-87dc-b9c8783116ea" containerName="registry-server" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.506869 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.509567 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.510739 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.511009 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.511123 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.511195 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.511246 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r7jw8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.511951 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.528941 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8"] Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.651469 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.651535 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.651572 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fc2bg\" (UniqueName: \"kubernetes.io/projected/31efb8b4-c179-4d28-b197-2803bef0c22e-kube-api-access-fc2bg\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.651600 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.651628 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.651653 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.651707 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.651742 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.651832 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.753328 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.753399 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.753483 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.753523 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: 
\"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.753544 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.753575 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fc2bg\" (UniqueName: \"kubernetes.io/projected/31efb8b4-c179-4d28-b197-2803bef0c22e-kube-api-access-fc2bg\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.753596 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.753622 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.753643 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.754840 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.759000 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.759022 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-ssh-key\") pod 
\"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.759412 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.761677 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.762021 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.767072 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.769611 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fc2bg\" (UniqueName: \"kubernetes.io/projected/31efb8b4-c179-4d28-b197-2803bef0c22e-kube-api-access-fc2bg\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.773720 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-85px8\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:13 crc kubenswrapper[4665]: I1205 01:55:13.824975 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:55:14 crc kubenswrapper[4665]: I1205 01:55:14.324034 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8"] Dec 05 01:55:14 crc kubenswrapper[4665]: I1205 01:55:14.417268 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" event={"ID":"31efb8b4-c179-4d28-b197-2803bef0c22e","Type":"ContainerStarted","Data":"b22c9fafee8725def9a2475200caca79c5c1d342c8fcfba70f2ea56bed601e67"} Dec 05 01:55:15 crc kubenswrapper[4665]: I1205 01:55:15.427220 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" event={"ID":"31efb8b4-c179-4d28-b197-2803bef0c22e","Type":"ContainerStarted","Data":"94772d956250ac0e4c5514986271f87d328b293768905c9486c6be73d2816274"} Dec 05 01:55:15 crc kubenswrapper[4665]: I1205 01:55:15.448094 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" podStartSLOduration=2.219430422 podStartE2EDuration="2.448075548s" podCreationTimestamp="2025-12-05 01:55:13 +0000 UTC" firstStartedPulling="2025-12-05 01:55:14.330522271 +0000 UTC m=+2689.669914570" lastFinishedPulling="2025-12-05 01:55:14.559167397 +0000 UTC m=+2689.898559696" observedRunningTime="2025-12-05 01:55:15.443017285 +0000 UTC m=+2690.782409574" watchObservedRunningTime="2025-12-05 01:55:15.448075548 +0000 UTC m=+2690.787467867" Dec 05 01:55:19 crc kubenswrapper[4665]: I1205 01:55:19.893205 4665 scope.go:117] "RemoveContainer" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" Dec 05 01:55:20 crc kubenswrapper[4665]: I1205 01:55:20.470553 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerStarted","Data":"485c242ff54618088441ff4aa43b1f7d69f9ff4283c4ba36cd5f5eb843639d41"} Dec 05 01:56:59 crc kubenswrapper[4665]: I1205 01:56:59.243818 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gljlf"] Dec 05 01:56:59 crc kubenswrapper[4665]: I1205 01:56:59.246576 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gljlf" Dec 05 01:56:59 crc kubenswrapper[4665]: I1205 01:56:59.274050 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gljlf"] Dec 05 01:56:59 crc kubenswrapper[4665]: I1205 01:56:59.335371 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c81b1654-4e32-4ca9-b48c-a0f568a74480-utilities\") pod \"redhat-operators-gljlf\" (UID: \"c81b1654-4e32-4ca9-b48c-a0f568a74480\") " pod="openshift-marketplace/redhat-operators-gljlf" Dec 05 01:56:59 crc kubenswrapper[4665]: I1205 01:56:59.335442 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-grpw9\" (UniqueName: \"kubernetes.io/projected/c81b1654-4e32-4ca9-b48c-a0f568a74480-kube-api-access-grpw9\") pod \"redhat-operators-gljlf\" (UID: \"c81b1654-4e32-4ca9-b48c-a0f568a74480\") " pod="openshift-marketplace/redhat-operators-gljlf" Dec 05 01:56:59 crc kubenswrapper[4665]: I1205 01:56:59.335587 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c81b1654-4e32-4ca9-b48c-a0f568a74480-catalog-content\") pod \"redhat-operators-gljlf\" (UID: \"c81b1654-4e32-4ca9-b48c-a0f568a74480\") " pod="openshift-marketplace/redhat-operators-gljlf" Dec 05 01:56:59 crc kubenswrapper[4665]: I1205 01:56:59.437565 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c81b1654-4e32-4ca9-b48c-a0f568a74480-catalog-content\") pod \"redhat-operators-gljlf\" (UID: \"c81b1654-4e32-4ca9-b48c-a0f568a74480\") " pod="openshift-marketplace/redhat-operators-gljlf" Dec 05 01:56:59 crc kubenswrapper[4665]: I1205 01:56:59.437859 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c81b1654-4e32-4ca9-b48c-a0f568a74480-utilities\") pod \"redhat-operators-gljlf\" (UID: \"c81b1654-4e32-4ca9-b48c-a0f568a74480\") " pod="openshift-marketplace/redhat-operators-gljlf" Dec 05 01:56:59 crc kubenswrapper[4665]: I1205 01:56:59.437903 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-grpw9\" (UniqueName: \"kubernetes.io/projected/c81b1654-4e32-4ca9-b48c-a0f568a74480-kube-api-access-grpw9\") pod \"redhat-operators-gljlf\" (UID: \"c81b1654-4e32-4ca9-b48c-a0f568a74480\") " pod="openshift-marketplace/redhat-operators-gljlf" Dec 05 01:56:59 crc kubenswrapper[4665]: I1205 01:56:59.438052 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c81b1654-4e32-4ca9-b48c-a0f568a74480-catalog-content\") pod \"redhat-operators-gljlf\" (UID: \"c81b1654-4e32-4ca9-b48c-a0f568a74480\") " pod="openshift-marketplace/redhat-operators-gljlf" Dec 05 01:56:59 crc kubenswrapper[4665]: I1205 01:56:59.438265 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c81b1654-4e32-4ca9-b48c-a0f568a74480-utilities\") pod \"redhat-operators-gljlf\" (UID: \"c81b1654-4e32-4ca9-b48c-a0f568a74480\") " pod="openshift-marketplace/redhat-operators-gljlf" Dec 05 01:56:59 crc kubenswrapper[4665]: I1205 01:56:59.463008 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-grpw9\" (UniqueName: \"kubernetes.io/projected/c81b1654-4e32-4ca9-b48c-a0f568a74480-kube-api-access-grpw9\") pod \"redhat-operators-gljlf\" (UID: \"c81b1654-4e32-4ca9-b48c-a0f568a74480\") " pod="openshift-marketplace/redhat-operators-gljlf" Dec 05 01:56:59 crc kubenswrapper[4665]: I1205 01:56:59.601747 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gljlf" Dec 05 01:57:00 crc kubenswrapper[4665]: I1205 01:57:00.092424 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gljlf"] Dec 05 01:57:00 crc kubenswrapper[4665]: I1205 01:57:00.337732 4665 generic.go:334] "Generic (PLEG): container finished" podID="c81b1654-4e32-4ca9-b48c-a0f568a74480" containerID="99cb72bd114ce636b424c8facb63992dd8c10f295e5bf3f4756b5b28ba3ab7a6" exitCode=0 Dec 05 01:57:00 crc kubenswrapper[4665]: I1205 01:57:00.337870 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gljlf" event={"ID":"c81b1654-4e32-4ca9-b48c-a0f568a74480","Type":"ContainerDied","Data":"99cb72bd114ce636b424c8facb63992dd8c10f295e5bf3f4756b5b28ba3ab7a6"} Dec 05 01:57:00 crc kubenswrapper[4665]: I1205 01:57:00.339711 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gljlf" event={"ID":"c81b1654-4e32-4ca9-b48c-a0f568a74480","Type":"ContainerStarted","Data":"ce07ae7f7dd21c21fff0c6e825a729ee27ac603eeb9b114def89be5778a9eebf"} Dec 05 01:57:00 crc kubenswrapper[4665]: I1205 01:57:00.339889 4665 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 01:57:01 crc kubenswrapper[4665]: I1205 01:57:01.352941 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gljlf" event={"ID":"c81b1654-4e32-4ca9-b48c-a0f568a74480","Type":"ContainerStarted","Data":"61e2b2b245baf97bb67ce578b006d0a949b706e6725c8b04e1993183df0733d2"} Dec 05 01:57:04 crc kubenswrapper[4665]: I1205 01:57:04.378934 4665 generic.go:334] "Generic (PLEG): container finished" podID="c81b1654-4e32-4ca9-b48c-a0f568a74480" containerID="61e2b2b245baf97bb67ce578b006d0a949b706e6725c8b04e1993183df0733d2" exitCode=0 Dec 05 01:57:04 crc kubenswrapper[4665]: I1205 01:57:04.378976 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gljlf" event={"ID":"c81b1654-4e32-4ca9-b48c-a0f568a74480","Type":"ContainerDied","Data":"61e2b2b245baf97bb67ce578b006d0a949b706e6725c8b04e1993183df0733d2"} Dec 05 01:57:05 crc kubenswrapper[4665]: I1205 01:57:05.390555 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gljlf" event={"ID":"c81b1654-4e32-4ca9-b48c-a0f568a74480","Type":"ContainerStarted","Data":"39c839e527af6ecd7ac86b6eead451ff05095c5e5c81d3a56e224385f5c0df5e"} Dec 05 01:57:05 crc kubenswrapper[4665]: I1205 01:57:05.412272 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gljlf" podStartSLOduration=1.953859198 podStartE2EDuration="6.412252616s" podCreationTimestamp="2025-12-05 01:56:59 +0000 UTC" firstStartedPulling="2025-12-05 01:57:00.339653535 +0000 UTC m=+2795.679045834" lastFinishedPulling="2025-12-05 01:57:04.798046963 +0000 UTC m=+2800.137439252" observedRunningTime="2025-12-05 01:57:05.408780451 +0000 UTC m=+2800.748172750" watchObservedRunningTime="2025-12-05 01:57:05.412252616 +0000 UTC m=+2800.751644915" Dec 05 01:57:09 crc 
Dec 05 01:57:09 crc kubenswrapper[4665]: I1205 01:57:09.629501 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gljlf"
Dec 05 01:57:09 crc kubenswrapper[4665]: I1205 01:57:09.630937 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gljlf"
Dec 05 01:57:10 crc kubenswrapper[4665]: I1205 01:57:10.681131 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-gljlf" podUID="c81b1654-4e32-4ca9-b48c-a0f568a74480" containerName="registry-server" probeResult="failure" output=<
Dec 05 01:57:10 crc kubenswrapper[4665]: timeout: failed to connect service ":50051" within 1s
Dec 05 01:57:10 crc kubenswrapper[4665]: >
Dec 05 01:57:19 crc kubenswrapper[4665]: I1205 01:57:19.677892 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gljlf"
Dec 05 01:57:19 crc kubenswrapper[4665]: I1205 01:57:19.740277 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gljlf"
Dec 05 01:57:19 crc kubenswrapper[4665]: I1205 01:57:19.918394 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gljlf"]
Dec 05 01:57:21 crc kubenswrapper[4665]: I1205 01:57:21.519843 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gljlf" podUID="c81b1654-4e32-4ca9-b48c-a0f568a74480" containerName="registry-server" containerID="cri-o://39c839e527af6ecd7ac86b6eead451ff05095c5e5c81d3a56e224385f5c0df5e" gracePeriod=2
Dec 05 01:57:21 crc kubenswrapper[4665]: I1205 01:57:21.974604 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gljlf"
Dec 05 01:57:22 crc kubenswrapper[4665]: I1205 01:57:22.088265 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-grpw9\" (UniqueName: \"kubernetes.io/projected/c81b1654-4e32-4ca9-b48c-a0f568a74480-kube-api-access-grpw9\") pod \"c81b1654-4e32-4ca9-b48c-a0f568a74480\" (UID: \"c81b1654-4e32-4ca9-b48c-a0f568a74480\") "
Dec 05 01:57:22 crc kubenswrapper[4665]: I1205 01:57:22.088781 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c81b1654-4e32-4ca9-b48c-a0f568a74480-utilities\") pod \"c81b1654-4e32-4ca9-b48c-a0f568a74480\" (UID: \"c81b1654-4e32-4ca9-b48c-a0f568a74480\") "
Dec 05 01:57:22 crc kubenswrapper[4665]: I1205 01:57:22.088865 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c81b1654-4e32-4ca9-b48c-a0f568a74480-catalog-content\") pod \"c81b1654-4e32-4ca9-b48c-a0f568a74480\" (UID: \"c81b1654-4e32-4ca9-b48c-a0f568a74480\") "
Dec 05 01:57:22 crc kubenswrapper[4665]: I1205 01:57:22.090148 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c81b1654-4e32-4ca9-b48c-a0f568a74480-utilities" (OuterVolumeSpecName: "utilities") pod "c81b1654-4e32-4ca9-b48c-a0f568a74480" (UID: "c81b1654-4e32-4ca9-b48c-a0f568a74480"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 01:57:22 crc kubenswrapper[4665]: I1205 01:57:22.104434 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c81b1654-4e32-4ca9-b48c-a0f568a74480-kube-api-access-grpw9" (OuterVolumeSpecName: "kube-api-access-grpw9") pod "c81b1654-4e32-4ca9-b48c-a0f568a74480" (UID: "c81b1654-4e32-4ca9-b48c-a0f568a74480"). InnerVolumeSpecName "kube-api-access-grpw9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:57:22 crc kubenswrapper[4665]: I1205 01:57:22.190855 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c81b1654-4e32-4ca9-b48c-a0f568a74480-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 01:57:22 crc kubenswrapper[4665]: I1205 01:57:22.190882 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-grpw9\" (UniqueName: \"kubernetes.io/projected/c81b1654-4e32-4ca9-b48c-a0f568a74480-kube-api-access-grpw9\") on node \"crc\" DevicePath \"\""
Dec 05 01:57:22 crc kubenswrapper[4665]: I1205 01:57:22.192184 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c81b1654-4e32-4ca9-b48c-a0f568a74480-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c81b1654-4e32-4ca9-b48c-a0f568a74480" (UID: "c81b1654-4e32-4ca9-b48c-a0f568a74480"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 01:57:22 crc kubenswrapper[4665]: I1205 01:57:22.292979 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c81b1654-4e32-4ca9-b48c-a0f568a74480-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 01:57:22 crc kubenswrapper[4665]: I1205 01:57:22.529788 4665 generic.go:334] "Generic (PLEG): container finished" podID="c81b1654-4e32-4ca9-b48c-a0f568a74480" containerID="39c839e527af6ecd7ac86b6eead451ff05095c5e5c81d3a56e224385f5c0df5e" exitCode=0
Need to start a new one" pod="openshift-marketplace/redhat-operators-gljlf" Dec 05 01:57:22 crc kubenswrapper[4665]: I1205 01:57:22.529834 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gljlf" event={"ID":"c81b1654-4e32-4ca9-b48c-a0f568a74480","Type":"ContainerDied","Data":"39c839e527af6ecd7ac86b6eead451ff05095c5e5c81d3a56e224385f5c0df5e"} Dec 05 01:57:22 crc kubenswrapper[4665]: I1205 01:57:22.530805 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gljlf" event={"ID":"c81b1654-4e32-4ca9-b48c-a0f568a74480","Type":"ContainerDied","Data":"ce07ae7f7dd21c21fff0c6e825a729ee27ac603eeb9b114def89be5778a9eebf"} Dec 05 01:57:22 crc kubenswrapper[4665]: I1205 01:57:22.530886 4665 scope.go:117] "RemoveContainer" containerID="39c839e527af6ecd7ac86b6eead451ff05095c5e5c81d3a56e224385f5c0df5e" Dec 05 01:57:22 crc kubenswrapper[4665]: I1205 01:57:22.556360 4665 scope.go:117] "RemoveContainer" containerID="61e2b2b245baf97bb67ce578b006d0a949b706e6725c8b04e1993183df0733d2" Dec 05 01:57:22 crc kubenswrapper[4665]: I1205 01:57:22.577022 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gljlf"] Dec 05 01:57:22 crc kubenswrapper[4665]: I1205 01:57:22.586883 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gljlf"] Dec 05 01:57:22 crc kubenswrapper[4665]: I1205 01:57:22.586901 4665 scope.go:117] "RemoveContainer" containerID="99cb72bd114ce636b424c8facb63992dd8c10f295e5bf3f4756b5b28ba3ab7a6" Dec 05 01:57:22 crc kubenswrapper[4665]: I1205 01:57:22.626436 4665 scope.go:117] "RemoveContainer" containerID="39c839e527af6ecd7ac86b6eead451ff05095c5e5c81d3a56e224385f5c0df5e" Dec 05 01:57:22 crc kubenswrapper[4665]: E1205 01:57:22.626866 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"39c839e527af6ecd7ac86b6eead451ff05095c5e5c81d3a56e224385f5c0df5e\": container with ID starting with 39c839e527af6ecd7ac86b6eead451ff05095c5e5c81d3a56e224385f5c0df5e not found: ID does not exist" containerID="39c839e527af6ecd7ac86b6eead451ff05095c5e5c81d3a56e224385f5c0df5e" Dec 05 01:57:22 crc kubenswrapper[4665]: I1205 01:57:22.626913 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"39c839e527af6ecd7ac86b6eead451ff05095c5e5c81d3a56e224385f5c0df5e"} err="failed to get container status \"39c839e527af6ecd7ac86b6eead451ff05095c5e5c81d3a56e224385f5c0df5e\": rpc error: code = NotFound desc = could not find container \"39c839e527af6ecd7ac86b6eead451ff05095c5e5c81d3a56e224385f5c0df5e\": container with ID starting with 39c839e527af6ecd7ac86b6eead451ff05095c5e5c81d3a56e224385f5c0df5e not found: ID does not exist" Dec 05 01:57:22 crc kubenswrapper[4665]: I1205 01:57:22.626939 4665 scope.go:117] "RemoveContainer" containerID="61e2b2b245baf97bb67ce578b006d0a949b706e6725c8b04e1993183df0733d2" Dec 05 01:57:22 crc kubenswrapper[4665]: E1205 01:57:22.627360 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"61e2b2b245baf97bb67ce578b006d0a949b706e6725c8b04e1993183df0733d2\": container with ID starting with 61e2b2b245baf97bb67ce578b006d0a949b706e6725c8b04e1993183df0733d2 not found: ID does not exist" containerID="61e2b2b245baf97bb67ce578b006d0a949b706e6725c8b04e1993183df0733d2" Dec 05 01:57:22 crc kubenswrapper[4665]: I1205 01:57:22.627484 4665 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"61e2b2b245baf97bb67ce578b006d0a949b706e6725c8b04e1993183df0733d2"} err="failed to get container status \"61e2b2b245baf97bb67ce578b006d0a949b706e6725c8b04e1993183df0733d2\": rpc error: code = NotFound desc = could not find container \"61e2b2b245baf97bb67ce578b006d0a949b706e6725c8b04e1993183df0733d2\": container with ID starting with 61e2b2b245baf97bb67ce578b006d0a949b706e6725c8b04e1993183df0733d2 not found: ID does not exist" Dec 05 01:57:22 crc kubenswrapper[4665]: I1205 01:57:22.627575 4665 scope.go:117] "RemoveContainer" containerID="99cb72bd114ce636b424c8facb63992dd8c10f295e5bf3f4756b5b28ba3ab7a6" Dec 05 01:57:22 crc kubenswrapper[4665]: E1205 01:57:22.627930 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"99cb72bd114ce636b424c8facb63992dd8c10f295e5bf3f4756b5b28ba3ab7a6\": container with ID starting with 99cb72bd114ce636b424c8facb63992dd8c10f295e5bf3f4756b5b28ba3ab7a6 not found: ID does not exist" containerID="99cb72bd114ce636b424c8facb63992dd8c10f295e5bf3f4756b5b28ba3ab7a6" Dec 05 01:57:22 crc kubenswrapper[4665]: I1205 01:57:22.627970 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"99cb72bd114ce636b424c8facb63992dd8c10f295e5bf3f4756b5b28ba3ab7a6"} err="failed to get container status \"99cb72bd114ce636b424c8facb63992dd8c10f295e5bf3f4756b5b28ba3ab7a6\": rpc error: code = NotFound desc = could not find container \"99cb72bd114ce636b424c8facb63992dd8c10f295e5bf3f4756b5b28ba3ab7a6\": container with ID starting with 99cb72bd114ce636b424c8facb63992dd8c10f295e5bf3f4756b5b28ba3ab7a6 not found: ID does not exist" Dec 05 01:57:22 crc kubenswrapper[4665]: I1205 01:57:22.906657 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c81b1654-4e32-4ca9-b48c-a0f568a74480" path="/var/lib/kubelet/pods/c81b1654-4e32-4ca9-b48c-a0f568a74480/volumes" Dec 05 01:57:44 crc kubenswrapper[4665]: I1205 01:57:44.922342 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:57:44 crc kubenswrapper[4665]: I1205 01:57:44.924487 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:58:14 crc kubenswrapper[4665]: I1205 01:58:14.922553 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:58:14 crc kubenswrapper[4665]: I1205 01:58:14.923163 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:58:28 crc kubenswrapper[4665]: I1205 
Dec 05 01:58:28 crc kubenswrapper[4665]: I1205 01:58:28.154830 4665 generic.go:334] "Generic (PLEG): container finished" podID="31efb8b4-c179-4d28-b197-2803bef0c22e" containerID="94772d956250ac0e4c5514986271f87d328b293768905c9486c6be73d2816274" exitCode=0
Dec 05 01:58:28 crc kubenswrapper[4665]: I1205 01:58:28.154950 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" event={"ID":"31efb8b4-c179-4d28-b197-2803bef0c22e","Type":"ContainerDied","Data":"94772d956250ac0e4c5514986271f87d328b293768905c9486c6be73d2816274"}
Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.641386 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8"
Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.713043 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-cell1-compute-config-1\") pod \"31efb8b4-c179-4d28-b197-2803bef0c22e\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") "
Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.713122 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-migration-ssh-key-1\") pod \"31efb8b4-c179-4d28-b197-2803bef0c22e\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") "
Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.713160 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-combined-ca-bundle\") pod \"31efb8b4-c179-4d28-b197-2803bef0c22e\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") "
Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.713231 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-extra-config-0\") pod \"31efb8b4-c179-4d28-b197-2803bef0c22e\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") "
Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.713409 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fc2bg\" (UniqueName: \"kubernetes.io/projected/31efb8b4-c179-4d28-b197-2803bef0c22e-kube-api-access-fc2bg\") pod \"31efb8b4-c179-4d28-b197-2803bef0c22e\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") "
Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.713441 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-cell1-compute-config-0\") pod \"31efb8b4-c179-4d28-b197-2803bef0c22e\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") "
Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.713484 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-ssh-key\") pod \"31efb8b4-c179-4d28-b197-2803bef0c22e\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") "
Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.713582 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-inventory\") pod \"31efb8b4-c179-4d28-b197-2803bef0c22e\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") "
Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.713623 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-migration-ssh-key-0\") pod \"31efb8b4-c179-4d28-b197-2803bef0c22e\" (UID: \"31efb8b4-c179-4d28-b197-2803bef0c22e\") "
Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.732808 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31efb8b4-c179-4d28-b197-2803bef0c22e-kube-api-access-fc2bg" (OuterVolumeSpecName: "kube-api-access-fc2bg") pod "31efb8b4-c179-4d28-b197-2803bef0c22e" (UID: "31efb8b4-c179-4d28-b197-2803bef0c22e"). InnerVolumeSpecName "kube-api-access-fc2bg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.736319 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "31efb8b4-c179-4d28-b197-2803bef0c22e" (UID: "31efb8b4-c179-4d28-b197-2803bef0c22e"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.747525 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "31efb8b4-c179-4d28-b197-2803bef0c22e" (UID: "31efb8b4-c179-4d28-b197-2803bef0c22e"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.751428 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "31efb8b4-c179-4d28-b197-2803bef0c22e" (UID: "31efb8b4-c179-4d28-b197-2803bef0c22e"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.759546 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-inventory" (OuterVolumeSpecName: "inventory") pod "31efb8b4-c179-4d28-b197-2803bef0c22e" (UID: "31efb8b4-c179-4d28-b197-2803bef0c22e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.768829 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "31efb8b4-c179-4d28-b197-2803bef0c22e" (UID: "31efb8b4-c179-4d28-b197-2803bef0c22e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.772041 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "31efb8b4-c179-4d28-b197-2803bef0c22e" (UID: "31efb8b4-c179-4d28-b197-2803bef0c22e"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.777057 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "31efb8b4-c179-4d28-b197-2803bef0c22e" (UID: "31efb8b4-c179-4d28-b197-2803bef0c22e"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.782973 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "31efb8b4-c179-4d28-b197-2803bef0c22e" (UID: "31efb8b4-c179-4d28-b197-2803bef0c22e"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.816386 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fc2bg\" (UniqueName: \"kubernetes.io/projected/31efb8b4-c179-4d28-b197-2803bef0c22e-kube-api-access-fc2bg\") on node \"crc\" DevicePath \"\"" Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.816415 4665 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.816426 4665 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.816437 4665 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.816446 4665 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.816455 4665 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.816463 4665 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.816471 4665 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 01:58:29 crc kubenswrapper[4665]: I1205 01:58:29.816479 4665 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/31efb8b4-c179-4d28-b197-2803bef0c22e-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.184359 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" event={"ID":"31efb8b4-c179-4d28-b197-2803bef0c22e","Type":"ContainerDied","Data":"b22c9fafee8725def9a2475200caca79c5c1d342c8fcfba70f2ea56bed601e67"} Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.184396 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b22c9fafee8725def9a2475200caca79c5c1d342c8fcfba70f2ea56bed601e67" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.184453 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-85px8" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.355702 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76"] Dec 05 01:58:30 crc kubenswrapper[4665]: E1205 01:58:30.356259 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c81b1654-4e32-4ca9-b48c-a0f568a74480" containerName="registry-server" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.356387 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="c81b1654-4e32-4ca9-b48c-a0f568a74480" containerName="registry-server" Dec 05 01:58:30 crc kubenswrapper[4665]: E1205 01:58:30.356457 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31efb8b4-c179-4d28-b197-2803bef0c22e" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.356515 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="31efb8b4-c179-4d28-b197-2803bef0c22e" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 01:58:30 crc kubenswrapper[4665]: E1205 01:58:30.356581 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c81b1654-4e32-4ca9-b48c-a0f568a74480" containerName="extract-utilities" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.356630 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="c81b1654-4e32-4ca9-b48c-a0f568a74480" containerName="extract-utilities" Dec 05 01:58:30 crc kubenswrapper[4665]: E1205 01:58:30.356695 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c81b1654-4e32-4ca9-b48c-a0f568a74480" containerName="extract-content" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.356752 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="c81b1654-4e32-4ca9-b48c-a0f568a74480" containerName="extract-content" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.356973 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="31efb8b4-c179-4d28-b197-2803bef0c22e" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.357039 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="c81b1654-4e32-4ca9-b48c-a0f568a74480" containerName="registry-server" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.357907 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.360101 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.360569 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.360953 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.361892 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.362457 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-r7jw8" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.369869 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76"] Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.425878 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hrk76\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.426219 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hrk76\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.426279 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hrk76\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.426348 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hrk76\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.426396 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hrk76\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" 
Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.426418 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hrk76\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.426539 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rh7q2\" (UniqueName: \"kubernetes.io/projected/820a4267-c307-42cb-96cb-482a2919cfe7-kube-api-access-rh7q2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hrk76\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.528745 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rh7q2\" (UniqueName: \"kubernetes.io/projected/820a4267-c307-42cb-96cb-482a2919cfe7-kube-api-access-rh7q2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hrk76\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.528864 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hrk76\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.528924 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hrk76\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.528977 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hrk76\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.529036 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hrk76\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.529098 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hrk76\" (UID: 
\"820a4267-c307-42cb-96cb-482a2919cfe7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.529128 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hrk76\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.533080 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hrk76\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.533423 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hrk76\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.533565 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hrk76\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.540163 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hrk76\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.541278 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hrk76\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.542897 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hrk76\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.546209 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rh7q2\" (UniqueName: \"kubernetes.io/projected/820a4267-c307-42cb-96cb-482a2919cfe7-kube-api-access-rh7q2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-hrk76\" (UID: 
\"820a4267-c307-42cb-96cb-482a2919cfe7\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" Dec 05 01:58:30 crc kubenswrapper[4665]: I1205 01:58:30.677197 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" Dec 05 01:58:31 crc kubenswrapper[4665]: I1205 01:58:31.225820 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76"] Dec 05 01:58:32 crc kubenswrapper[4665]: I1205 01:58:32.200317 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" event={"ID":"820a4267-c307-42cb-96cb-482a2919cfe7","Type":"ContainerStarted","Data":"3e39f3c51b9bd2546d7b15c42b1ead37405b8f1eb47d49b31e9eadb972c9dab8"} Dec 05 01:58:32 crc kubenswrapper[4665]: I1205 01:58:32.200666 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" event={"ID":"820a4267-c307-42cb-96cb-482a2919cfe7","Type":"ContainerStarted","Data":"3c0ffed29f83533f8f278b1702d15f30aa3fe3419eb14d2ca57cff29f643cc13"} Dec 05 01:58:32 crc kubenswrapper[4665]: I1205 01:58:32.218454 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" podStartSLOduration=2.054229567 podStartE2EDuration="2.21843864s" podCreationTimestamp="2025-12-05 01:58:30 +0000 UTC" firstStartedPulling="2025-12-05 01:58:31.219383096 +0000 UTC m=+2886.558775395" lastFinishedPulling="2025-12-05 01:58:31.383592169 +0000 UTC m=+2886.722984468" observedRunningTime="2025-12-05 01:58:32.216509073 +0000 UTC m=+2887.555901372" watchObservedRunningTime="2025-12-05 01:58:32.21843864 +0000 UTC m=+2887.557830939" Dec 05 01:58:44 crc kubenswrapper[4665]: I1205 01:58:44.922058 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 01:58:44 crc kubenswrapper[4665]: I1205 01:58:44.922657 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 01:58:44 crc kubenswrapper[4665]: I1205 01:58:44.922714 4665 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 01:58:44 crc kubenswrapper[4665]: I1205 01:58:44.923565 4665 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"485c242ff54618088441ff4aa43b1f7d69f9ff4283c4ba36cd5f5eb843639d41"} pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 01:58:44 crc kubenswrapper[4665]: I1205 01:58:44.923631 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" 
containerID="cri-o://485c242ff54618088441ff4aa43b1f7d69f9ff4283c4ba36cd5f5eb843639d41" gracePeriod=600 Dec 05 01:58:45 crc kubenswrapper[4665]: I1205 01:58:45.309173 4665 generic.go:334] "Generic (PLEG): container finished" podID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerID="485c242ff54618088441ff4aa43b1f7d69f9ff4283c4ba36cd5f5eb843639d41" exitCode=0 Dec 05 01:58:45 crc kubenswrapper[4665]: I1205 01:58:45.309427 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerDied","Data":"485c242ff54618088441ff4aa43b1f7d69f9ff4283c4ba36cd5f5eb843639d41"} Dec 05 01:58:45 crc kubenswrapper[4665]: I1205 01:58:45.310241 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerStarted","Data":"9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224"} Dec 05 01:58:45 crc kubenswrapper[4665]: I1205 01:58:45.310286 4665 scope.go:117] "RemoveContainer" containerID="2cf450b4d23edc53bee78e25e5aad349535c91c78b1ceddc44185fff746476ca" Dec 05 02:00:00 crc kubenswrapper[4665]: I1205 02:00:00.148655 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415000-hcp4j"] Dec 05 02:00:00 crc kubenswrapper[4665]: I1205 02:00:00.150727 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415000-hcp4j" Dec 05 02:00:00 crc kubenswrapper[4665]: I1205 02:00:00.154664 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 02:00:00 crc kubenswrapper[4665]: I1205 02:00:00.158121 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 02:00:00 crc kubenswrapper[4665]: I1205 02:00:00.174750 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415000-hcp4j"] Dec 05 02:00:00 crc kubenswrapper[4665]: I1205 02:00:00.283359 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c-config-volume\") pod \"collect-profiles-29415000-hcp4j\" (UID: \"e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415000-hcp4j" Dec 05 02:00:00 crc kubenswrapper[4665]: I1205 02:00:00.283798 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c-secret-volume\") pod \"collect-profiles-29415000-hcp4j\" (UID: \"e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415000-hcp4j" Dec 05 02:00:00 crc kubenswrapper[4665]: I1205 02:00:00.284362 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hwqcq\" (UniqueName: \"kubernetes.io/projected/e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c-kube-api-access-hwqcq\") pod \"collect-profiles-29415000-hcp4j\" (UID: \"e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415000-hcp4j" Dec 05 
02:00:00 crc kubenswrapper[4665]: I1205 02:00:00.385989 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c-config-volume\") pod \"collect-profiles-29415000-hcp4j\" (UID: \"e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415000-hcp4j" Dec 05 02:00:00 crc kubenswrapper[4665]: I1205 02:00:00.386364 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c-secret-volume\") pod \"collect-profiles-29415000-hcp4j\" (UID: \"e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415000-hcp4j" Dec 05 02:00:00 crc kubenswrapper[4665]: I1205 02:00:00.386619 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hwqcq\" (UniqueName: \"kubernetes.io/projected/e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c-kube-api-access-hwqcq\") pod \"collect-profiles-29415000-hcp4j\" (UID: \"e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415000-hcp4j" Dec 05 02:00:00 crc kubenswrapper[4665]: I1205 02:00:00.386961 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c-config-volume\") pod \"collect-profiles-29415000-hcp4j\" (UID: \"e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415000-hcp4j" Dec 05 02:00:00 crc kubenswrapper[4665]: I1205 02:00:00.395320 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c-secret-volume\") pod \"collect-profiles-29415000-hcp4j\" (UID: \"e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415000-hcp4j" Dec 05 02:00:00 crc kubenswrapper[4665]: I1205 02:00:00.409612 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hwqcq\" (UniqueName: \"kubernetes.io/projected/e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c-kube-api-access-hwqcq\") pod \"collect-profiles-29415000-hcp4j\" (UID: \"e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415000-hcp4j" Dec 05 02:00:00 crc kubenswrapper[4665]: I1205 02:00:00.471216 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415000-hcp4j" Dec 05 02:00:00 crc kubenswrapper[4665]: I1205 02:00:00.998996 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415000-hcp4j"] Dec 05 02:00:02 crc kubenswrapper[4665]: I1205 02:00:02.039497 4665 generic.go:334] "Generic (PLEG): container finished" podID="e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c" containerID="5ad5b81683b36936a1ac9f253211589518ccf713d6f7d3c6d8c5e61ba68e6340" exitCode=0 Dec 05 02:00:02 crc kubenswrapper[4665]: I1205 02:00:02.039548 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415000-hcp4j" event={"ID":"e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c","Type":"ContainerDied","Data":"5ad5b81683b36936a1ac9f253211589518ccf713d6f7d3c6d8c5e61ba68e6340"} Dec 05 02:00:02 crc kubenswrapper[4665]: I1205 02:00:02.039816 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415000-hcp4j" event={"ID":"e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c","Type":"ContainerStarted","Data":"2c66b32769db6c1c7efd695fd4b2d8e94abb2df66679196a15ec057b772b44ce"} Dec 05 02:00:03 crc kubenswrapper[4665]: I1205 02:00:03.448087 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415000-hcp4j" Dec 05 02:00:03 crc kubenswrapper[4665]: I1205 02:00:03.555419 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c-secret-volume\") pod \"e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c\" (UID: \"e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c\") " Dec 05 02:00:03 crc kubenswrapper[4665]: I1205 02:00:03.555691 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c-config-volume\") pod \"e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c\" (UID: \"e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c\") " Dec 05 02:00:03 crc kubenswrapper[4665]: I1205 02:00:03.555733 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hwqcq\" (UniqueName: \"kubernetes.io/projected/e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c-kube-api-access-hwqcq\") pod \"e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c\" (UID: \"e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c\") " Dec 05 02:00:03 crc kubenswrapper[4665]: I1205 02:00:03.556267 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c-config-volume" (OuterVolumeSpecName: "config-volume") pod "e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c" (UID: "e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 02:00:03 crc kubenswrapper[4665]: I1205 02:00:03.562449 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c" (UID: "e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 02:00:03 crc kubenswrapper[4665]: I1205 02:00:03.562519 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c-kube-api-access-hwqcq" (OuterVolumeSpecName: "kube-api-access-hwqcq") pod "e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c" (UID: "e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c"). InnerVolumeSpecName "kube-api-access-hwqcq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:00:03 crc kubenswrapper[4665]: I1205 02:00:03.658993 4665 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 02:00:03 crc kubenswrapper[4665]: I1205 02:00:03.659020 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hwqcq\" (UniqueName: \"kubernetes.io/projected/e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c-kube-api-access-hwqcq\") on node \"crc\" DevicePath \"\"" Dec 05 02:00:03 crc kubenswrapper[4665]: I1205 02:00:03.659030 4665 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 02:00:04 crc kubenswrapper[4665]: I1205 02:00:04.075918 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415000-hcp4j" event={"ID":"e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c","Type":"ContainerDied","Data":"2c66b32769db6c1c7efd695fd4b2d8e94abb2df66679196a15ec057b772b44ce"} Dec 05 02:00:04 crc kubenswrapper[4665]: I1205 02:00:04.075977 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2c66b32769db6c1c7efd695fd4b2d8e94abb2df66679196a15ec057b772b44ce" Dec 05 02:00:04 crc kubenswrapper[4665]: I1205 02:00:04.075976 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415000-hcp4j" Dec 05 02:00:04 crc kubenswrapper[4665]: I1205 02:00:04.536012 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414955-2klss"] Dec 05 02:00:04 crc kubenswrapper[4665]: I1205 02:00:04.543820 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414955-2klss"] Dec 05 02:00:04 crc kubenswrapper[4665]: I1205 02:00:04.926849 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="29229102-10ab-474a-a236-0dd9bb1553cb" path="/var/lib/kubelet/pods/29229102-10ab-474a-a236-0dd9bb1553cb/volumes" Dec 05 02:00:34 crc kubenswrapper[4665]: I1205 02:00:34.205421 4665 scope.go:117] "RemoveContainer" containerID="19ae4d42c17f241db84dd7f9a015c0e4a98dbaa2fb1f50e8c110ccf931277448" Dec 05 02:01:00 crc kubenswrapper[4665]: I1205 02:01:00.157159 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29415001-s26ld"] Dec 05 02:01:00 crc kubenswrapper[4665]: E1205 02:01:00.158111 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c" containerName="collect-profiles" Dec 05 02:01:00 crc kubenswrapper[4665]: I1205 02:01:00.158124 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c" containerName="collect-profiles" Dec 05 02:01:00 crc kubenswrapper[4665]: I1205 02:01:00.158337 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="e3552635-07a7-4ac8-bb4b-f7f3a6be8b6c" containerName="collect-profiles" Dec 05 02:01:00 crc kubenswrapper[4665]: I1205 02:01:00.158984 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cron-29415001-s26ld" Dec 05 02:01:00 crc kubenswrapper[4665]: I1205 02:01:00.173014 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415001-s26ld"] Dec 05 02:01:00 crc kubenswrapper[4665]: I1205 02:01:00.335607 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9k8vl\" (UniqueName: \"kubernetes.io/projected/01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa-kube-api-access-9k8vl\") pod \"keystone-cron-29415001-s26ld\" (UID: \"01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa\") " pod="openstack/keystone-cron-29415001-s26ld" Dec 05 02:01:00 crc kubenswrapper[4665]: I1205 02:01:00.335672 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa-config-data\") pod \"keystone-cron-29415001-s26ld\" (UID: \"01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa\") " pod="openstack/keystone-cron-29415001-s26ld" Dec 05 02:01:00 crc kubenswrapper[4665]: I1205 02:01:00.335863 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa-combined-ca-bundle\") pod \"keystone-cron-29415001-s26ld\" (UID: \"01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa\") " pod="openstack/keystone-cron-29415001-s26ld" Dec 05 02:01:00 crc kubenswrapper[4665]: I1205 02:01:00.335906 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa-fernet-keys\") pod \"keystone-cron-29415001-s26ld\" (UID: \"01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa\") " pod="openstack/keystone-cron-29415001-s26ld" Dec 05 02:01:00 crc kubenswrapper[4665]: I1205 02:01:00.438112 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa-combined-ca-bundle\") pod \"keystone-cron-29415001-s26ld\" (UID: \"01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa\") " pod="openstack/keystone-cron-29415001-s26ld" Dec 05 02:01:00 crc kubenswrapper[4665]: I1205 02:01:00.438486 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa-fernet-keys\") pod \"keystone-cron-29415001-s26ld\" (UID: \"01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa\") " pod="openstack/keystone-cron-29415001-s26ld" Dec 05 02:01:00 crc kubenswrapper[4665]: I1205 02:01:00.438582 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9k8vl\" (UniqueName: \"kubernetes.io/projected/01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa-kube-api-access-9k8vl\") pod \"keystone-cron-29415001-s26ld\" (UID: \"01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa\") " pod="openstack/keystone-cron-29415001-s26ld" Dec 05 02:01:00 crc kubenswrapper[4665]: I1205 02:01:00.438622 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa-config-data\") pod \"keystone-cron-29415001-s26ld\" (UID: \"01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa\") " pod="openstack/keystone-cron-29415001-s26ld" Dec 05 02:01:00 crc kubenswrapper[4665]: I1205 02:01:00.445942 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa-fernet-keys\") pod \"keystone-cron-29415001-s26ld\" (UID: \"01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa\") " pod="openstack/keystone-cron-29415001-s26ld" Dec 05 02:01:00 crc kubenswrapper[4665]: I1205 02:01:00.448270 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa-config-data\") pod \"keystone-cron-29415001-s26ld\" (UID: \"01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa\") " pod="openstack/keystone-cron-29415001-s26ld" Dec 05 02:01:00 crc kubenswrapper[4665]: I1205 02:01:00.448778 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa-combined-ca-bundle\") pod \"keystone-cron-29415001-s26ld\" (UID: \"01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa\") " pod="openstack/keystone-cron-29415001-s26ld" Dec 05 02:01:00 crc kubenswrapper[4665]: I1205 02:01:00.457015 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9k8vl\" (UniqueName: \"kubernetes.io/projected/01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa-kube-api-access-9k8vl\") pod \"keystone-cron-29415001-s26ld\" (UID: \"01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa\") " pod="openstack/keystone-cron-29415001-s26ld" Dec 05 02:01:00 crc kubenswrapper[4665]: I1205 02:01:00.478473 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415001-s26ld" Dec 05 02:01:00 crc kubenswrapper[4665]: I1205 02:01:00.968791 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415001-s26ld"] Dec 05 02:01:01 crc kubenswrapper[4665]: I1205 02:01:01.544904 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415001-s26ld" event={"ID":"01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa","Type":"ContainerStarted","Data":"8d3bf2e896c000c9dafe44a71dc485c5ed6bbe5fbdb62e0b22747e983cf708cb"} Dec 05 02:01:01 crc kubenswrapper[4665]: I1205 02:01:01.545612 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415001-s26ld" event={"ID":"01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa","Type":"ContainerStarted","Data":"f71e3bc2b2fb29b78605f343fda56169b819193ad08861cdaff92986b34b6052"} Dec 05 02:01:01 crc kubenswrapper[4665]: I1205 02:01:01.567116 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29415001-s26ld" podStartSLOduration=1.567100223 podStartE2EDuration="1.567100223s" podCreationTimestamp="2025-12-05 02:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 02:01:01.563079746 +0000 UTC m=+3036.902472045" watchObservedRunningTime="2025-12-05 02:01:01.567100223 +0000 UTC m=+3036.906492522" Dec 05 02:01:03 crc kubenswrapper[4665]: I1205 02:01:03.565710 4665 generic.go:334] "Generic (PLEG): container finished" podID="01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa" containerID="8d3bf2e896c000c9dafe44a71dc485c5ed6bbe5fbdb62e0b22747e983cf708cb" exitCode=0 Dec 05 02:01:03 crc kubenswrapper[4665]: I1205 02:01:03.565788 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415001-s26ld" event={"ID":"01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa","Type":"ContainerDied","Data":"8d3bf2e896c000c9dafe44a71dc485c5ed6bbe5fbdb62e0b22747e983cf708cb"} Dec 05 02:01:04 crc kubenswrapper[4665]: 
I1205 02:01:04.901092 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415001-s26ld" Dec 05 02:01:04 crc kubenswrapper[4665]: I1205 02:01:04.933913 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa-fernet-keys\") pod \"01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa\" (UID: \"01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa\") " Dec 05 02:01:04 crc kubenswrapper[4665]: I1205 02:01:04.935166 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa-combined-ca-bundle\") pod \"01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa\" (UID: \"01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa\") " Dec 05 02:01:04 crc kubenswrapper[4665]: I1205 02:01:04.935614 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9k8vl\" (UniqueName: \"kubernetes.io/projected/01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa-kube-api-access-9k8vl\") pod \"01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa\" (UID: \"01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa\") " Dec 05 02:01:04 crc kubenswrapper[4665]: I1205 02:01:04.935640 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa-config-data\") pod \"01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa\" (UID: \"01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa\") " Dec 05 02:01:04 crc kubenswrapper[4665]: I1205 02:01:04.952643 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa" (UID: "01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 02:01:04 crc kubenswrapper[4665]: I1205 02:01:04.952920 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa-kube-api-access-9k8vl" (OuterVolumeSpecName: "kube-api-access-9k8vl") pod "01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa" (UID: "01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa"). InnerVolumeSpecName "kube-api-access-9k8vl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:01:04 crc kubenswrapper[4665]: I1205 02:01:04.984623 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa" (UID: "01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 02:01:05 crc kubenswrapper[4665]: I1205 02:01:05.022721 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa-config-data" (OuterVolumeSpecName: "config-data") pod "01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa" (UID: "01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 02:01:05 crc kubenswrapper[4665]: I1205 02:01:05.038228 4665 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 02:01:05 crc kubenswrapper[4665]: I1205 02:01:05.038543 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9k8vl\" (UniqueName: \"kubernetes.io/projected/01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa-kube-api-access-9k8vl\") on node \"crc\" DevicePath \"\"" Dec 05 02:01:05 crc kubenswrapper[4665]: I1205 02:01:05.038623 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 02:01:05 crc kubenswrapper[4665]: I1205 02:01:05.038688 4665 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 02:01:05 crc kubenswrapper[4665]: I1205 02:01:05.590903 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415001-s26ld" event={"ID":"01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa","Type":"ContainerDied","Data":"f71e3bc2b2fb29b78605f343fda56169b819193ad08861cdaff92986b34b6052"} Dec 05 02:01:05 crc kubenswrapper[4665]: I1205 02:01:05.590964 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415001-s26ld" Dec 05 02:01:05 crc kubenswrapper[4665]: I1205 02:01:05.591418 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f71e3bc2b2fb29b78605f343fda56169b819193ad08861cdaff92986b34b6052" Dec 05 02:01:14 crc kubenswrapper[4665]: I1205 02:01:14.922149 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 02:01:14 crc kubenswrapper[4665]: I1205 02:01:14.922620 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 02:01:38 crc kubenswrapper[4665]: I1205 02:01:38.161961 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vn6hg"] Dec 05 02:01:38 crc kubenswrapper[4665]: E1205 02:01:38.162831 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa" containerName="keystone-cron" Dec 05 02:01:38 crc kubenswrapper[4665]: I1205 02:01:38.162843 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa" containerName="keystone-cron" Dec 05 02:01:38 crc kubenswrapper[4665]: I1205 02:01:38.163034 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa" containerName="keystone-cron" Dec 05 02:01:38 crc kubenswrapper[4665]: I1205 02:01:38.164358 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vn6hg" Dec 05 02:01:38 crc kubenswrapper[4665]: I1205 02:01:38.168852 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9822906-c0ce-4795-9fde-1dd131507eaf-utilities\") pod \"certified-operators-vn6hg\" (UID: \"d9822906-c0ce-4795-9fde-1dd131507eaf\") " pod="openshift-marketplace/certified-operators-vn6hg" Dec 05 02:01:38 crc kubenswrapper[4665]: I1205 02:01:38.168946 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9822906-c0ce-4795-9fde-1dd131507eaf-catalog-content\") pod \"certified-operators-vn6hg\" (UID: \"d9822906-c0ce-4795-9fde-1dd131507eaf\") " pod="openshift-marketplace/certified-operators-vn6hg" Dec 05 02:01:38 crc kubenswrapper[4665]: I1205 02:01:38.168997 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqcq7\" (UniqueName: \"kubernetes.io/projected/d9822906-c0ce-4795-9fde-1dd131507eaf-kube-api-access-tqcq7\") pod \"certified-operators-vn6hg\" (UID: \"d9822906-c0ce-4795-9fde-1dd131507eaf\") " pod="openshift-marketplace/certified-operators-vn6hg" Dec 05 02:01:38 crc kubenswrapper[4665]: I1205 02:01:38.178790 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vn6hg"] Dec 05 02:01:38 crc kubenswrapper[4665]: I1205 02:01:38.269765 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9822906-c0ce-4795-9fde-1dd131507eaf-utilities\") pod \"certified-operators-vn6hg\" (UID: \"d9822906-c0ce-4795-9fde-1dd131507eaf\") " pod="openshift-marketplace/certified-operators-vn6hg" Dec 05 02:01:38 crc kubenswrapper[4665]: I1205 02:01:38.269830 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9822906-c0ce-4795-9fde-1dd131507eaf-catalog-content\") pod \"certified-operators-vn6hg\" (UID: \"d9822906-c0ce-4795-9fde-1dd131507eaf\") " pod="openshift-marketplace/certified-operators-vn6hg" Dec 05 02:01:38 crc kubenswrapper[4665]: I1205 02:01:38.269881 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqcq7\" (UniqueName: \"kubernetes.io/projected/d9822906-c0ce-4795-9fde-1dd131507eaf-kube-api-access-tqcq7\") pod \"certified-operators-vn6hg\" (UID: \"d9822906-c0ce-4795-9fde-1dd131507eaf\") " pod="openshift-marketplace/certified-operators-vn6hg" Dec 05 02:01:38 crc kubenswrapper[4665]: I1205 02:01:38.270255 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9822906-c0ce-4795-9fde-1dd131507eaf-utilities\") pod \"certified-operators-vn6hg\" (UID: \"d9822906-c0ce-4795-9fde-1dd131507eaf\") " pod="openshift-marketplace/certified-operators-vn6hg" Dec 05 02:01:38 crc kubenswrapper[4665]: I1205 02:01:38.270290 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9822906-c0ce-4795-9fde-1dd131507eaf-catalog-content\") pod \"certified-operators-vn6hg\" (UID: \"d9822906-c0ce-4795-9fde-1dd131507eaf\") " pod="openshift-marketplace/certified-operators-vn6hg" Dec 05 02:01:38 crc kubenswrapper[4665]: I1205 02:01:38.296225 4665 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-tqcq7\" (UniqueName: \"kubernetes.io/projected/d9822906-c0ce-4795-9fde-1dd131507eaf-kube-api-access-tqcq7\") pod \"certified-operators-vn6hg\" (UID: \"d9822906-c0ce-4795-9fde-1dd131507eaf\") " pod="openshift-marketplace/certified-operators-vn6hg" Dec 05 02:01:38 crc kubenswrapper[4665]: I1205 02:01:38.489441 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vn6hg" Dec 05 02:01:39 crc kubenswrapper[4665]: I1205 02:01:39.670157 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vn6hg"] Dec 05 02:01:39 crc kubenswrapper[4665]: I1205 02:01:39.929403 4665 generic.go:334] "Generic (PLEG): container finished" podID="d9822906-c0ce-4795-9fde-1dd131507eaf" containerID="3f2ffd7ae4d7f9187059e27ec07d7b958c6c37e69ffc145ff509a6de7b2a6024" exitCode=0 Dec 05 02:01:39 crc kubenswrapper[4665]: I1205 02:01:39.929660 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vn6hg" event={"ID":"d9822906-c0ce-4795-9fde-1dd131507eaf","Type":"ContainerDied","Data":"3f2ffd7ae4d7f9187059e27ec07d7b958c6c37e69ffc145ff509a6de7b2a6024"} Dec 05 02:01:39 crc kubenswrapper[4665]: I1205 02:01:39.929733 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vn6hg" event={"ID":"d9822906-c0ce-4795-9fde-1dd131507eaf","Type":"ContainerStarted","Data":"c4f104fedd988c47cb06d532583fd130762a51baf51512d1477e20ca485a5b83"} Dec 05 02:01:40 crc kubenswrapper[4665]: I1205 02:01:40.938056 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vn6hg" event={"ID":"d9822906-c0ce-4795-9fde-1dd131507eaf","Type":"ContainerStarted","Data":"8a62779a3d5dc2a6bed45826679d277c3734d79979a56fc93bdfd9e3fac53de4"} Dec 05 02:01:41 crc kubenswrapper[4665]: E1205 02:01:41.855468 4665 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd9822906_c0ce_4795_9fde_1dd131507eaf.slice/crio-conmon-8a62779a3d5dc2a6bed45826679d277c3734d79979a56fc93bdfd9e3fac53de4.scope\": RecentStats: unable to find data in memory cache]" Dec 05 02:01:41 crc kubenswrapper[4665]: I1205 02:01:41.946676 4665 generic.go:334] "Generic (PLEG): container finished" podID="d9822906-c0ce-4795-9fde-1dd131507eaf" containerID="8a62779a3d5dc2a6bed45826679d277c3734d79979a56fc93bdfd9e3fac53de4" exitCode=0 Dec 05 02:01:41 crc kubenswrapper[4665]: I1205 02:01:41.946730 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vn6hg" event={"ID":"d9822906-c0ce-4795-9fde-1dd131507eaf","Type":"ContainerDied","Data":"8a62779a3d5dc2a6bed45826679d277c3734d79979a56fc93bdfd9e3fac53de4"} Dec 05 02:01:42 crc kubenswrapper[4665]: I1205 02:01:42.956915 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vn6hg" event={"ID":"d9822906-c0ce-4795-9fde-1dd131507eaf","Type":"ContainerStarted","Data":"ff9591a4eb76e61563db1072fad078b924519b0c6a8fbff37db100cc859bc6d4"} Dec 05 02:01:42 crc kubenswrapper[4665]: I1205 02:01:42.979066 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vn6hg" podStartSLOduration=2.571735707 podStartE2EDuration="4.979047823s" podCreationTimestamp="2025-12-05 02:01:38 +0000 UTC" 
firstStartedPulling="2025-12-05 02:01:39.931652399 +0000 UTC m=+3075.271044698" lastFinishedPulling="2025-12-05 02:01:42.338964515 +0000 UTC m=+3077.678356814" observedRunningTime="2025-12-05 02:01:42.973943059 +0000 UTC m=+3078.313335358" watchObservedRunningTime="2025-12-05 02:01:42.979047823 +0000 UTC m=+3078.318440122" Dec 05 02:01:44 crc kubenswrapper[4665]: I1205 02:01:44.922387 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 02:01:44 crc kubenswrapper[4665]: I1205 02:01:44.922691 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 02:01:48 crc kubenswrapper[4665]: I1205 02:01:48.490098 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vn6hg" Dec 05 02:01:48 crc kubenswrapper[4665]: I1205 02:01:48.490662 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vn6hg" Dec 05 02:01:48 crc kubenswrapper[4665]: I1205 02:01:48.583970 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vn6hg" Dec 05 02:01:49 crc kubenswrapper[4665]: I1205 02:01:49.074142 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vn6hg" Dec 05 02:01:49 crc kubenswrapper[4665]: I1205 02:01:49.129197 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vn6hg"] Dec 05 02:01:51 crc kubenswrapper[4665]: I1205 02:01:51.023700 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vn6hg" podUID="d9822906-c0ce-4795-9fde-1dd131507eaf" containerName="registry-server" containerID="cri-o://ff9591a4eb76e61563db1072fad078b924519b0c6a8fbff37db100cc859bc6d4" gracePeriod=2 Dec 05 02:01:51 crc kubenswrapper[4665]: I1205 02:01:51.480552 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vn6hg" Dec 05 02:01:51 crc kubenswrapper[4665]: I1205 02:01:51.550445 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9822906-c0ce-4795-9fde-1dd131507eaf-catalog-content\") pod \"d9822906-c0ce-4795-9fde-1dd131507eaf\" (UID: \"d9822906-c0ce-4795-9fde-1dd131507eaf\") " Dec 05 02:01:51 crc kubenswrapper[4665]: I1205 02:01:51.550903 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqcq7\" (UniqueName: \"kubernetes.io/projected/d9822906-c0ce-4795-9fde-1dd131507eaf-kube-api-access-tqcq7\") pod \"d9822906-c0ce-4795-9fde-1dd131507eaf\" (UID: \"d9822906-c0ce-4795-9fde-1dd131507eaf\") " Dec 05 02:01:51 crc kubenswrapper[4665]: I1205 02:01:51.550961 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9822906-c0ce-4795-9fde-1dd131507eaf-utilities\") pod \"d9822906-c0ce-4795-9fde-1dd131507eaf\" (UID: \"d9822906-c0ce-4795-9fde-1dd131507eaf\") " Dec 05 02:01:51 crc kubenswrapper[4665]: I1205 02:01:51.551677 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9822906-c0ce-4795-9fde-1dd131507eaf-utilities" (OuterVolumeSpecName: "utilities") pod "d9822906-c0ce-4795-9fde-1dd131507eaf" (UID: "d9822906-c0ce-4795-9fde-1dd131507eaf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:01:51 crc kubenswrapper[4665]: I1205 02:01:51.560060 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d9822906-c0ce-4795-9fde-1dd131507eaf-kube-api-access-tqcq7" (OuterVolumeSpecName: "kube-api-access-tqcq7") pod "d9822906-c0ce-4795-9fde-1dd131507eaf" (UID: "d9822906-c0ce-4795-9fde-1dd131507eaf"). InnerVolumeSpecName "kube-api-access-tqcq7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:01:51 crc kubenswrapper[4665]: I1205 02:01:51.600452 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d9822906-c0ce-4795-9fde-1dd131507eaf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d9822906-c0ce-4795-9fde-1dd131507eaf" (UID: "d9822906-c0ce-4795-9fde-1dd131507eaf"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:01:51 crc kubenswrapper[4665]: I1205 02:01:51.653482 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d9822906-c0ce-4795-9fde-1dd131507eaf-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 02:01:51 crc kubenswrapper[4665]: I1205 02:01:51.653520 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqcq7\" (UniqueName: \"kubernetes.io/projected/d9822906-c0ce-4795-9fde-1dd131507eaf-kube-api-access-tqcq7\") on node \"crc\" DevicePath \"\"" Dec 05 02:01:51 crc kubenswrapper[4665]: I1205 02:01:51.653536 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d9822906-c0ce-4795-9fde-1dd131507eaf-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 02:01:52 crc kubenswrapper[4665]: I1205 02:01:52.057795 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vn6hg" Dec 05 02:01:52 crc kubenswrapper[4665]: I1205 02:01:52.057807 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vn6hg" event={"ID":"d9822906-c0ce-4795-9fde-1dd131507eaf","Type":"ContainerDied","Data":"ff9591a4eb76e61563db1072fad078b924519b0c6a8fbff37db100cc859bc6d4"} Dec 05 02:01:52 crc kubenswrapper[4665]: I1205 02:01:52.057871 4665 scope.go:117] "RemoveContainer" containerID="ff9591a4eb76e61563db1072fad078b924519b0c6a8fbff37db100cc859bc6d4" Dec 05 02:01:52 crc kubenswrapper[4665]: I1205 02:01:52.057715 4665 generic.go:334] "Generic (PLEG): container finished" podID="d9822906-c0ce-4795-9fde-1dd131507eaf" containerID="ff9591a4eb76e61563db1072fad078b924519b0c6a8fbff37db100cc859bc6d4" exitCode=0 Dec 05 02:01:52 crc kubenswrapper[4665]: I1205 02:01:52.058566 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vn6hg" event={"ID":"d9822906-c0ce-4795-9fde-1dd131507eaf","Type":"ContainerDied","Data":"c4f104fedd988c47cb06d532583fd130762a51baf51512d1477e20ca485a5b83"} Dec 05 02:01:52 crc kubenswrapper[4665]: I1205 02:01:52.090162 4665 scope.go:117] "RemoveContainer" containerID="8a62779a3d5dc2a6bed45826679d277c3734d79979a56fc93bdfd9e3fac53de4" Dec 05 02:01:52 crc kubenswrapper[4665]: I1205 02:01:52.096600 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vn6hg"] Dec 05 02:01:52 crc kubenswrapper[4665]: I1205 02:01:52.107085 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vn6hg"] Dec 05 02:01:52 crc kubenswrapper[4665]: I1205 02:01:52.119524 4665 scope.go:117] "RemoveContainer" containerID="3f2ffd7ae4d7f9187059e27ec07d7b958c6c37e69ffc145ff509a6de7b2a6024" Dec 05 02:01:52 crc kubenswrapper[4665]: I1205 02:01:52.192913 4665 scope.go:117] "RemoveContainer" containerID="ff9591a4eb76e61563db1072fad078b924519b0c6a8fbff37db100cc859bc6d4" Dec 05 02:01:52 crc kubenswrapper[4665]: E1205 02:01:52.193724 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ff9591a4eb76e61563db1072fad078b924519b0c6a8fbff37db100cc859bc6d4\": container with ID starting with ff9591a4eb76e61563db1072fad078b924519b0c6a8fbff37db100cc859bc6d4 not found: ID does not exist" containerID="ff9591a4eb76e61563db1072fad078b924519b0c6a8fbff37db100cc859bc6d4" Dec 05 02:01:52 crc kubenswrapper[4665]: I1205 02:01:52.193761 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ff9591a4eb76e61563db1072fad078b924519b0c6a8fbff37db100cc859bc6d4"} err="failed to get container status \"ff9591a4eb76e61563db1072fad078b924519b0c6a8fbff37db100cc859bc6d4\": rpc error: code = NotFound desc = could not find container \"ff9591a4eb76e61563db1072fad078b924519b0c6a8fbff37db100cc859bc6d4\": container with ID starting with ff9591a4eb76e61563db1072fad078b924519b0c6a8fbff37db100cc859bc6d4 not found: ID does not exist" Dec 05 02:01:52 crc kubenswrapper[4665]: I1205 02:01:52.193787 4665 scope.go:117] "RemoveContainer" containerID="8a62779a3d5dc2a6bed45826679d277c3734d79979a56fc93bdfd9e3fac53de4" Dec 05 02:01:52 crc kubenswrapper[4665]: E1205 02:01:52.194135 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a62779a3d5dc2a6bed45826679d277c3734d79979a56fc93bdfd9e3fac53de4\": container with ID 
starting with 8a62779a3d5dc2a6bed45826679d277c3734d79979a56fc93bdfd9e3fac53de4 not found: ID does not exist" containerID="8a62779a3d5dc2a6bed45826679d277c3734d79979a56fc93bdfd9e3fac53de4" Dec 05 02:01:52 crc kubenswrapper[4665]: I1205 02:01:52.194190 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a62779a3d5dc2a6bed45826679d277c3734d79979a56fc93bdfd9e3fac53de4"} err="failed to get container status \"8a62779a3d5dc2a6bed45826679d277c3734d79979a56fc93bdfd9e3fac53de4\": rpc error: code = NotFound desc = could not find container \"8a62779a3d5dc2a6bed45826679d277c3734d79979a56fc93bdfd9e3fac53de4\": container with ID starting with 8a62779a3d5dc2a6bed45826679d277c3734d79979a56fc93bdfd9e3fac53de4 not found: ID does not exist" Dec 05 02:01:52 crc kubenswrapper[4665]: I1205 02:01:52.194216 4665 scope.go:117] "RemoveContainer" containerID="3f2ffd7ae4d7f9187059e27ec07d7b958c6c37e69ffc145ff509a6de7b2a6024" Dec 05 02:01:52 crc kubenswrapper[4665]: E1205 02:01:52.194516 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f2ffd7ae4d7f9187059e27ec07d7b958c6c37e69ffc145ff509a6de7b2a6024\": container with ID starting with 3f2ffd7ae4d7f9187059e27ec07d7b958c6c37e69ffc145ff509a6de7b2a6024 not found: ID does not exist" containerID="3f2ffd7ae4d7f9187059e27ec07d7b958c6c37e69ffc145ff509a6de7b2a6024" Dec 05 02:01:52 crc kubenswrapper[4665]: I1205 02:01:52.194551 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f2ffd7ae4d7f9187059e27ec07d7b958c6c37e69ffc145ff509a6de7b2a6024"} err="failed to get container status \"3f2ffd7ae4d7f9187059e27ec07d7b958c6c37e69ffc145ff509a6de7b2a6024\": rpc error: code = NotFound desc = could not find container \"3f2ffd7ae4d7f9187059e27ec07d7b958c6c37e69ffc145ff509a6de7b2a6024\": container with ID starting with 3f2ffd7ae4d7f9187059e27ec07d7b958c6c37e69ffc145ff509a6de7b2a6024 not found: ID does not exist" Dec 05 02:01:52 crc kubenswrapper[4665]: I1205 02:01:52.907751 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d9822906-c0ce-4795-9fde-1dd131507eaf" path="/var/lib/kubelet/pods/d9822906-c0ce-4795-9fde-1dd131507eaf/volumes" Dec 05 02:02:14 crc kubenswrapper[4665]: I1205 02:02:14.921918 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 02:02:14 crc kubenswrapper[4665]: I1205 02:02:14.922424 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 02:02:14 crc kubenswrapper[4665]: I1205 02:02:14.922476 4665 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 02:02:14 crc kubenswrapper[4665]: I1205 02:02:14.923417 4665 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224"} 
pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 02:02:14 crc kubenswrapper[4665]: I1205 02:02:14.923482 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" containerID="cri-o://9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224" gracePeriod=600 Dec 05 02:02:15 crc kubenswrapper[4665]: E1205 02:02:15.058164 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:02:15 crc kubenswrapper[4665]: I1205 02:02:15.299244 4665 generic.go:334] "Generic (PLEG): container finished" podID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224" exitCode=0 Dec 05 02:02:15 crc kubenswrapper[4665]: I1205 02:02:15.299315 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerDied","Data":"9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224"} Dec 05 02:02:15 crc kubenswrapper[4665]: I1205 02:02:15.299357 4665 scope.go:117] "RemoveContainer" containerID="485c242ff54618088441ff4aa43b1f7d69f9ff4283c4ba36cd5f5eb843639d41" Dec 05 02:02:15 crc kubenswrapper[4665]: I1205 02:02:15.300061 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224" Dec 05 02:02:15 crc kubenswrapper[4665]: E1205 02:02:15.300349 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:02:26 crc kubenswrapper[4665]: I1205 02:02:26.394713 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-f6886"] Dec 05 02:02:26 crc kubenswrapper[4665]: E1205 02:02:26.395573 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9822906-c0ce-4795-9fde-1dd131507eaf" containerName="extract-utilities" Dec 05 02:02:26 crc kubenswrapper[4665]: I1205 02:02:26.395586 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9822906-c0ce-4795-9fde-1dd131507eaf" containerName="extract-utilities" Dec 05 02:02:26 crc kubenswrapper[4665]: E1205 02:02:26.395608 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d9822906-c0ce-4795-9fde-1dd131507eaf" containerName="registry-server" Dec 05 02:02:26 crc kubenswrapper[4665]: I1205 02:02:26.395614 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9822906-c0ce-4795-9fde-1dd131507eaf" containerName="registry-server" Dec 05 02:02:26 crc kubenswrapper[4665]: E1205 02:02:26.395642 4665 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="d9822906-c0ce-4795-9fde-1dd131507eaf" containerName="extract-content" Dec 05 02:02:26 crc kubenswrapper[4665]: I1205 02:02:26.395648 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="d9822906-c0ce-4795-9fde-1dd131507eaf" containerName="extract-content" Dec 05 02:02:26 crc kubenswrapper[4665]: I1205 02:02:26.395811 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="d9822906-c0ce-4795-9fde-1dd131507eaf" containerName="registry-server" Dec 05 02:02:26 crc kubenswrapper[4665]: I1205 02:02:26.397132 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f6886" Dec 05 02:02:26 crc kubenswrapper[4665]: I1205 02:02:26.409739 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f6886"] Dec 05 02:02:26 crc kubenswrapper[4665]: I1205 02:02:26.471833 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4d715b7-45fb-4889-8b1a-f040fc6c1387-utilities\") pod \"community-operators-f6886\" (UID: \"a4d715b7-45fb-4889-8b1a-f040fc6c1387\") " pod="openshift-marketplace/community-operators-f6886" Dec 05 02:02:26 crc kubenswrapper[4665]: I1205 02:02:26.471869 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4d715b7-45fb-4889-8b1a-f040fc6c1387-catalog-content\") pod \"community-operators-f6886\" (UID: \"a4d715b7-45fb-4889-8b1a-f040fc6c1387\") " pod="openshift-marketplace/community-operators-f6886" Dec 05 02:02:26 crc kubenswrapper[4665]: I1205 02:02:26.471975 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dl2zp\" (UniqueName: \"kubernetes.io/projected/a4d715b7-45fb-4889-8b1a-f040fc6c1387-kube-api-access-dl2zp\") pod \"community-operators-f6886\" (UID: \"a4d715b7-45fb-4889-8b1a-f040fc6c1387\") " pod="openshift-marketplace/community-operators-f6886" Dec 05 02:02:26 crc kubenswrapper[4665]: I1205 02:02:26.573089 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4d715b7-45fb-4889-8b1a-f040fc6c1387-utilities\") pod \"community-operators-f6886\" (UID: \"a4d715b7-45fb-4889-8b1a-f040fc6c1387\") " pod="openshift-marketplace/community-operators-f6886" Dec 05 02:02:26 crc kubenswrapper[4665]: I1205 02:02:26.573126 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4d715b7-45fb-4889-8b1a-f040fc6c1387-catalog-content\") pod \"community-operators-f6886\" (UID: \"a4d715b7-45fb-4889-8b1a-f040fc6c1387\") " pod="openshift-marketplace/community-operators-f6886" Dec 05 02:02:26 crc kubenswrapper[4665]: I1205 02:02:26.573231 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dl2zp\" (UniqueName: \"kubernetes.io/projected/a4d715b7-45fb-4889-8b1a-f040fc6c1387-kube-api-access-dl2zp\") pod \"community-operators-f6886\" (UID: \"a4d715b7-45fb-4889-8b1a-f040fc6c1387\") " pod="openshift-marketplace/community-operators-f6886" Dec 05 02:02:26 crc kubenswrapper[4665]: I1205 02:02:26.573660 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/a4d715b7-45fb-4889-8b1a-f040fc6c1387-utilities\") pod \"community-operators-f6886\" (UID: \"a4d715b7-45fb-4889-8b1a-f040fc6c1387\") " pod="openshift-marketplace/community-operators-f6886" Dec 05 02:02:26 crc kubenswrapper[4665]: I1205 02:02:26.573694 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4d715b7-45fb-4889-8b1a-f040fc6c1387-catalog-content\") pod \"community-operators-f6886\" (UID: \"a4d715b7-45fb-4889-8b1a-f040fc6c1387\") " pod="openshift-marketplace/community-operators-f6886" Dec 05 02:02:26 crc kubenswrapper[4665]: I1205 02:02:26.594943 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dl2zp\" (UniqueName: \"kubernetes.io/projected/a4d715b7-45fb-4889-8b1a-f040fc6c1387-kube-api-access-dl2zp\") pod \"community-operators-f6886\" (UID: \"a4d715b7-45fb-4889-8b1a-f040fc6c1387\") " pod="openshift-marketplace/community-operators-f6886" Dec 05 02:02:26 crc kubenswrapper[4665]: I1205 02:02:26.767637 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f6886" Dec 05 02:02:27 crc kubenswrapper[4665]: I1205 02:02:27.323494 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-f6886"] Dec 05 02:02:27 crc kubenswrapper[4665]: W1205 02:02:27.328504 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda4d715b7_45fb_4889_8b1a_f040fc6c1387.slice/crio-fdfda25fc4604432e33d54958c965777bac56e7a835a76298339c0a591d454bc WatchSource:0}: Error finding container fdfda25fc4604432e33d54958c965777bac56e7a835a76298339c0a591d454bc: Status 404 returned error can't find the container with id fdfda25fc4604432e33d54958c965777bac56e7a835a76298339c0a591d454bc Dec 05 02:02:27 crc kubenswrapper[4665]: I1205 02:02:27.406897 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6886" event={"ID":"a4d715b7-45fb-4889-8b1a-f040fc6c1387","Type":"ContainerStarted","Data":"fdfda25fc4604432e33d54958c965777bac56e7a835a76298339c0a591d454bc"} Dec 05 02:02:28 crc kubenswrapper[4665]: I1205 02:02:28.420369 4665 generic.go:334] "Generic (PLEG): container finished" podID="a4d715b7-45fb-4889-8b1a-f040fc6c1387" containerID="ce1623b5cf4b330d2924a073816737fab840e13fb937f2c4c5845b9a0a2714c8" exitCode=0 Dec 05 02:02:28 crc kubenswrapper[4665]: I1205 02:02:28.420475 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6886" event={"ID":"a4d715b7-45fb-4889-8b1a-f040fc6c1387","Type":"ContainerDied","Data":"ce1623b5cf4b330d2924a073816737fab840e13fb937f2c4c5845b9a0a2714c8"} Dec 05 02:02:28 crc kubenswrapper[4665]: I1205 02:02:28.423635 4665 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 02:02:28 crc kubenswrapper[4665]: I1205 02:02:28.893961 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224" Dec 05 02:02:28 crc kubenswrapper[4665]: E1205 02:02:28.894755 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:02:29 crc kubenswrapper[4665]: I1205 02:02:29.432218 4665 generic.go:334] "Generic (PLEG): container finished" podID="820a4267-c307-42cb-96cb-482a2919cfe7" containerID="3e39f3c51b9bd2546d7b15c42b1ead37405b8f1eb47d49b31e9eadb972c9dab8" exitCode=0 Dec 05 02:02:29 crc kubenswrapper[4665]: I1205 02:02:29.433065 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" event={"ID":"820a4267-c307-42cb-96cb-482a2919cfe7","Type":"ContainerDied","Data":"3e39f3c51b9bd2546d7b15c42b1ead37405b8f1eb47d49b31e9eadb972c9dab8"} Dec 05 02:02:29 crc kubenswrapper[4665]: I1205 02:02:29.435060 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6886" event={"ID":"a4d715b7-45fb-4889-8b1a-f040fc6c1387","Type":"ContainerStarted","Data":"186aab2ce79183d094e0546084eea126f233bd7eca2de63f5d7eb5b65b4d316e"} Dec 05 02:02:30 crc kubenswrapper[4665]: I1205 02:02:30.445246 4665 generic.go:334] "Generic (PLEG): container finished" podID="a4d715b7-45fb-4889-8b1a-f040fc6c1387" containerID="186aab2ce79183d094e0546084eea126f233bd7eca2de63f5d7eb5b65b4d316e" exitCode=0 Dec 05 02:02:30 crc kubenswrapper[4665]: I1205 02:02:30.445407 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6886" event={"ID":"a4d715b7-45fb-4889-8b1a-f040fc6c1387","Type":"ContainerDied","Data":"186aab2ce79183d094e0546084eea126f233bd7eca2de63f5d7eb5b65b4d316e"} Dec 05 02:02:30 crc kubenswrapper[4665]: I1205 02:02:30.856094 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" Dec 05 02:02:30 crc kubenswrapper[4665]: I1205 02:02:30.958879 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-inventory\") pod \"820a4267-c307-42cb-96cb-482a2919cfe7\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " Dec 05 02:02:30 crc kubenswrapper[4665]: I1205 02:02:30.958993 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-ssh-key\") pod \"820a4267-c307-42cb-96cb-482a2919cfe7\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " Dec 05 02:02:30 crc kubenswrapper[4665]: I1205 02:02:30.959030 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-telemetry-combined-ca-bundle\") pod \"820a4267-c307-42cb-96cb-482a2919cfe7\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " Dec 05 02:02:30 crc kubenswrapper[4665]: I1205 02:02:30.959081 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-ceilometer-compute-config-data-1\") pod \"820a4267-c307-42cb-96cb-482a2919cfe7\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " Dec 05 02:02:30 crc kubenswrapper[4665]: I1205 02:02:30.959109 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-ceilometer-compute-config-data-2\") 
pod \"820a4267-c307-42cb-96cb-482a2919cfe7\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " Dec 05 02:02:30 crc kubenswrapper[4665]: I1205 02:02:30.959146 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-ceilometer-compute-config-data-0\") pod \"820a4267-c307-42cb-96cb-482a2919cfe7\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " Dec 05 02:02:30 crc kubenswrapper[4665]: I1205 02:02:30.959172 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rh7q2\" (UniqueName: \"kubernetes.io/projected/820a4267-c307-42cb-96cb-482a2919cfe7-kube-api-access-rh7q2\") pod \"820a4267-c307-42cb-96cb-482a2919cfe7\" (UID: \"820a4267-c307-42cb-96cb-482a2919cfe7\") " Dec 05 02:02:30 crc kubenswrapper[4665]: I1205 02:02:30.964454 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "820a4267-c307-42cb-96cb-482a2919cfe7" (UID: "820a4267-c307-42cb-96cb-482a2919cfe7"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 02:02:30 crc kubenswrapper[4665]: I1205 02:02:30.967393 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/820a4267-c307-42cb-96cb-482a2919cfe7-kube-api-access-rh7q2" (OuterVolumeSpecName: "kube-api-access-rh7q2") pod "820a4267-c307-42cb-96cb-482a2919cfe7" (UID: "820a4267-c307-42cb-96cb-482a2919cfe7"). InnerVolumeSpecName "kube-api-access-rh7q2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:02:30 crc kubenswrapper[4665]: I1205 02:02:30.992786 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-inventory" (OuterVolumeSpecName: "inventory") pod "820a4267-c307-42cb-96cb-482a2919cfe7" (UID: "820a4267-c307-42cb-96cb-482a2919cfe7"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 02:02:31 crc kubenswrapper[4665]: I1205 02:02:31.006595 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "820a4267-c307-42cb-96cb-482a2919cfe7" (UID: "820a4267-c307-42cb-96cb-482a2919cfe7"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 02:02:31 crc kubenswrapper[4665]: I1205 02:02:31.008676 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "820a4267-c307-42cb-96cb-482a2919cfe7" (UID: "820a4267-c307-42cb-96cb-482a2919cfe7"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 02:02:31 crc kubenswrapper[4665]: I1205 02:02:31.021507 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "820a4267-c307-42cb-96cb-482a2919cfe7" (UID: "820a4267-c307-42cb-96cb-482a2919cfe7"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 02:02:31 crc kubenswrapper[4665]: I1205 02:02:31.022532 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "820a4267-c307-42cb-96cb-482a2919cfe7" (UID: "820a4267-c307-42cb-96cb-482a2919cfe7"). InnerVolumeSpecName "ceilometer-compute-config-data-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 02:02:31 crc kubenswrapper[4665]: I1205 02:02:31.060833 4665 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 02:02:31 crc kubenswrapper[4665]: I1205 02:02:31.060865 4665 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 02:02:31 crc kubenswrapper[4665]: I1205 02:02:31.060876 4665 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 05 02:02:31 crc kubenswrapper[4665]: I1205 02:02:31.060888 4665 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 05 02:02:31 crc kubenswrapper[4665]: I1205 02:02:31.060898 4665 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 05 02:02:31 crc kubenswrapper[4665]: I1205 02:02:31.060911 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rh7q2\" (UniqueName: \"kubernetes.io/projected/820a4267-c307-42cb-96cb-482a2919cfe7-kube-api-access-rh7q2\") on node \"crc\" DevicePath \"\"" Dec 05 02:02:31 crc kubenswrapper[4665]: I1205 02:02:31.060920 4665 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/820a4267-c307-42cb-96cb-482a2919cfe7-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 02:02:31 crc kubenswrapper[4665]: I1205 02:02:31.457081 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" event={"ID":"820a4267-c307-42cb-96cb-482a2919cfe7","Type":"ContainerDied","Data":"3c0ffed29f83533f8f278b1702d15f30aa3fe3419eb14d2ca57cff29f643cc13"} Dec 05 02:02:31 crc kubenswrapper[4665]: I1205 02:02:31.457455 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3c0ffed29f83533f8f278b1702d15f30aa3fe3419eb14d2ca57cff29f643cc13" Dec 05 02:02:31 crc kubenswrapper[4665]: I1205 02:02:31.457477 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-hrk76" Dec 05 02:02:31 crc kubenswrapper[4665]: I1205 02:02:31.459694 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6886" event={"ID":"a4d715b7-45fb-4889-8b1a-f040fc6c1387","Type":"ContainerStarted","Data":"bd4021003bcf75d9db9559e7cd95501a108b3fbd7a1cfe0466445fb6527934ba"} Dec 05 02:02:31 crc kubenswrapper[4665]: I1205 02:02:31.505770 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-f6886" podStartSLOduration=3.092840229 podStartE2EDuration="5.505740189s" podCreationTimestamp="2025-12-05 02:02:26 +0000 UTC" firstStartedPulling="2025-12-05 02:02:28.423366009 +0000 UTC m=+3123.762758328" lastFinishedPulling="2025-12-05 02:02:30.836265989 +0000 UTC m=+3126.175658288" observedRunningTime="2025-12-05 02:02:31.497698153 +0000 UTC m=+3126.837090452" watchObservedRunningTime="2025-12-05 02:02:31.505740189 +0000 UTC m=+3126.845132488" Dec 05 02:02:36 crc kubenswrapper[4665]: I1205 02:02:36.768545 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-f6886" Dec 05 02:02:36 crc kubenswrapper[4665]: I1205 02:02:36.769023 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-f6886" Dec 05 02:02:36 crc kubenswrapper[4665]: I1205 02:02:36.850328 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-f6886" Dec 05 02:02:37 crc kubenswrapper[4665]: I1205 02:02:37.570558 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-f6886" Dec 05 02:02:37 crc kubenswrapper[4665]: I1205 02:02:37.627709 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f6886"] Dec 05 02:02:39 crc kubenswrapper[4665]: I1205 02:02:39.494481 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pbvx7"] Dec 05 02:02:39 crc kubenswrapper[4665]: E1205 02:02:39.495250 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="820a4267-c307-42cb-96cb-482a2919cfe7" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 02:02:39 crc kubenswrapper[4665]: I1205 02:02:39.496751 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="820a4267-c307-42cb-96cb-482a2919cfe7" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 02:02:39 crc kubenswrapper[4665]: I1205 02:02:39.497220 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="820a4267-c307-42cb-96cb-482a2919cfe7" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 02:02:39 crc kubenswrapper[4665]: I1205 02:02:39.501817 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pbvx7" Dec 05 02:02:39 crc kubenswrapper[4665]: I1205 02:02:39.561268 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pbvx7"] Dec 05 02:02:39 crc kubenswrapper[4665]: I1205 02:02:39.566680 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-f6886" podUID="a4d715b7-45fb-4889-8b1a-f040fc6c1387" containerName="registry-server" containerID="cri-o://bd4021003bcf75d9db9559e7cd95501a108b3fbd7a1cfe0466445fb6527934ba" gracePeriod=2 Dec 05 02:02:39 crc kubenswrapper[4665]: I1205 02:02:39.583393 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cp7g\" (UniqueName: \"kubernetes.io/projected/b75646e6-c710-4b48-b18e-5ec1100960be-kube-api-access-5cp7g\") pod \"redhat-marketplace-pbvx7\" (UID: \"b75646e6-c710-4b48-b18e-5ec1100960be\") " pod="openshift-marketplace/redhat-marketplace-pbvx7" Dec 05 02:02:39 crc kubenswrapper[4665]: I1205 02:02:39.583490 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b75646e6-c710-4b48-b18e-5ec1100960be-utilities\") pod \"redhat-marketplace-pbvx7\" (UID: \"b75646e6-c710-4b48-b18e-5ec1100960be\") " pod="openshift-marketplace/redhat-marketplace-pbvx7" Dec 05 02:02:39 crc kubenswrapper[4665]: I1205 02:02:39.583514 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b75646e6-c710-4b48-b18e-5ec1100960be-catalog-content\") pod \"redhat-marketplace-pbvx7\" (UID: \"b75646e6-c710-4b48-b18e-5ec1100960be\") " pod="openshift-marketplace/redhat-marketplace-pbvx7" Dec 05 02:02:39 crc kubenswrapper[4665]: I1205 02:02:39.686059 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cp7g\" (UniqueName: \"kubernetes.io/projected/b75646e6-c710-4b48-b18e-5ec1100960be-kube-api-access-5cp7g\") pod \"redhat-marketplace-pbvx7\" (UID: \"b75646e6-c710-4b48-b18e-5ec1100960be\") " pod="openshift-marketplace/redhat-marketplace-pbvx7" Dec 05 02:02:39 crc kubenswrapper[4665]: I1205 02:02:39.686508 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b75646e6-c710-4b48-b18e-5ec1100960be-utilities\") pod \"redhat-marketplace-pbvx7\" (UID: \"b75646e6-c710-4b48-b18e-5ec1100960be\") " pod="openshift-marketplace/redhat-marketplace-pbvx7" Dec 05 02:02:39 crc kubenswrapper[4665]: I1205 02:02:39.687363 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b75646e6-c710-4b48-b18e-5ec1100960be-catalog-content\") pod \"redhat-marketplace-pbvx7\" (UID: \"b75646e6-c710-4b48-b18e-5ec1100960be\") " pod="openshift-marketplace/redhat-marketplace-pbvx7" Dec 05 02:02:39 crc kubenswrapper[4665]: I1205 02:02:39.687322 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b75646e6-c710-4b48-b18e-5ec1100960be-utilities\") pod \"redhat-marketplace-pbvx7\" (UID: \"b75646e6-c710-4b48-b18e-5ec1100960be\") " pod="openshift-marketplace/redhat-marketplace-pbvx7" Dec 05 02:02:39 crc kubenswrapper[4665]: I1205 02:02:39.687865 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b75646e6-c710-4b48-b18e-5ec1100960be-catalog-content\") pod \"redhat-marketplace-pbvx7\" (UID: \"b75646e6-c710-4b48-b18e-5ec1100960be\") " pod="openshift-marketplace/redhat-marketplace-pbvx7" Dec 05 02:02:39 crc kubenswrapper[4665]: I1205 02:02:39.707261 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cp7g\" (UniqueName: \"kubernetes.io/projected/b75646e6-c710-4b48-b18e-5ec1100960be-kube-api-access-5cp7g\") pod \"redhat-marketplace-pbvx7\" (UID: \"b75646e6-c710-4b48-b18e-5ec1100960be\") " pod="openshift-marketplace/redhat-marketplace-pbvx7" Dec 05 02:02:39 crc kubenswrapper[4665]: I1205 02:02:39.843032 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pbvx7" Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.065457 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f6886" Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.106692 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dl2zp\" (UniqueName: \"kubernetes.io/projected/a4d715b7-45fb-4889-8b1a-f040fc6c1387-kube-api-access-dl2zp\") pod \"a4d715b7-45fb-4889-8b1a-f040fc6c1387\" (UID: \"a4d715b7-45fb-4889-8b1a-f040fc6c1387\") " Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.106923 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4d715b7-45fb-4889-8b1a-f040fc6c1387-catalog-content\") pod \"a4d715b7-45fb-4889-8b1a-f040fc6c1387\" (UID: \"a4d715b7-45fb-4889-8b1a-f040fc6c1387\") " Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.107071 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4d715b7-45fb-4889-8b1a-f040fc6c1387-utilities\") pod \"a4d715b7-45fb-4889-8b1a-f040fc6c1387\" (UID: \"a4d715b7-45fb-4889-8b1a-f040fc6c1387\") " Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.110683 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4d715b7-45fb-4889-8b1a-f040fc6c1387-utilities" (OuterVolumeSpecName: "utilities") pod "a4d715b7-45fb-4889-8b1a-f040fc6c1387" (UID: "a4d715b7-45fb-4889-8b1a-f040fc6c1387"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.120220 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a4d715b7-45fb-4889-8b1a-f040fc6c1387-kube-api-access-dl2zp" (OuterVolumeSpecName: "kube-api-access-dl2zp") pod "a4d715b7-45fb-4889-8b1a-f040fc6c1387" (UID: "a4d715b7-45fb-4889-8b1a-f040fc6c1387"). InnerVolumeSpecName "kube-api-access-dl2zp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.168912 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a4d715b7-45fb-4889-8b1a-f040fc6c1387-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a4d715b7-45fb-4889-8b1a-f040fc6c1387" (UID: "a4d715b7-45fb-4889-8b1a-f040fc6c1387"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.209237 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a4d715b7-45fb-4889-8b1a-f040fc6c1387-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.209265 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a4d715b7-45fb-4889-8b1a-f040fc6c1387-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.209276 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dl2zp\" (UniqueName: \"kubernetes.io/projected/a4d715b7-45fb-4889-8b1a-f040fc6c1387-kube-api-access-dl2zp\") on node \"crc\" DevicePath \"\"" Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.424554 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pbvx7"] Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.581678 4665 generic.go:334] "Generic (PLEG): container finished" podID="a4d715b7-45fb-4889-8b1a-f040fc6c1387" containerID="bd4021003bcf75d9db9559e7cd95501a108b3fbd7a1cfe0466445fb6527934ba" exitCode=0 Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.581739 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-f6886" Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.581760 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6886" event={"ID":"a4d715b7-45fb-4889-8b1a-f040fc6c1387","Type":"ContainerDied","Data":"bd4021003bcf75d9db9559e7cd95501a108b3fbd7a1cfe0466445fb6527934ba"} Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.581790 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-f6886" event={"ID":"a4d715b7-45fb-4889-8b1a-f040fc6c1387","Type":"ContainerDied","Data":"fdfda25fc4604432e33d54958c965777bac56e7a835a76298339c0a591d454bc"} Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.581810 4665 scope.go:117] "RemoveContainer" containerID="bd4021003bcf75d9db9559e7cd95501a108b3fbd7a1cfe0466445fb6527934ba" Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.586049 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pbvx7" event={"ID":"b75646e6-c710-4b48-b18e-5ec1100960be","Type":"ContainerStarted","Data":"1f34d903cff055de7b790161185656c06320fa47248d2b0c80254b9f056ad4de"} Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.586085 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pbvx7" event={"ID":"b75646e6-c710-4b48-b18e-5ec1100960be","Type":"ContainerStarted","Data":"ac69650d8f327156452598881df0fe08cf62a683090ef00363a100b7cd086d64"} Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.633981 4665 scope.go:117] "RemoveContainer" containerID="186aab2ce79183d094e0546084eea126f233bd7eca2de63f5d7eb5b65b4d316e" Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.639197 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-f6886"] Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.646929 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-f6886"] Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.660673 4665 
scope.go:117] "RemoveContainer" containerID="ce1623b5cf4b330d2924a073816737fab840e13fb937f2c4c5845b9a0a2714c8" Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.683604 4665 scope.go:117] "RemoveContainer" containerID="bd4021003bcf75d9db9559e7cd95501a108b3fbd7a1cfe0466445fb6527934ba" Dec 05 02:02:40 crc kubenswrapper[4665]: E1205 02:02:40.684072 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd4021003bcf75d9db9559e7cd95501a108b3fbd7a1cfe0466445fb6527934ba\": container with ID starting with bd4021003bcf75d9db9559e7cd95501a108b3fbd7a1cfe0466445fb6527934ba not found: ID does not exist" containerID="bd4021003bcf75d9db9559e7cd95501a108b3fbd7a1cfe0466445fb6527934ba" Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.684106 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd4021003bcf75d9db9559e7cd95501a108b3fbd7a1cfe0466445fb6527934ba"} err="failed to get container status \"bd4021003bcf75d9db9559e7cd95501a108b3fbd7a1cfe0466445fb6527934ba\": rpc error: code = NotFound desc = could not find container \"bd4021003bcf75d9db9559e7cd95501a108b3fbd7a1cfe0466445fb6527934ba\": container with ID starting with bd4021003bcf75d9db9559e7cd95501a108b3fbd7a1cfe0466445fb6527934ba not found: ID does not exist" Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.684131 4665 scope.go:117] "RemoveContainer" containerID="186aab2ce79183d094e0546084eea126f233bd7eca2de63f5d7eb5b65b4d316e" Dec 05 02:02:40 crc kubenswrapper[4665]: E1205 02:02:40.684585 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"186aab2ce79183d094e0546084eea126f233bd7eca2de63f5d7eb5b65b4d316e\": container with ID starting with 186aab2ce79183d094e0546084eea126f233bd7eca2de63f5d7eb5b65b4d316e not found: ID does not exist" containerID="186aab2ce79183d094e0546084eea126f233bd7eca2de63f5d7eb5b65b4d316e" Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.684613 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"186aab2ce79183d094e0546084eea126f233bd7eca2de63f5d7eb5b65b4d316e"} err="failed to get container status \"186aab2ce79183d094e0546084eea126f233bd7eca2de63f5d7eb5b65b4d316e\": rpc error: code = NotFound desc = could not find container \"186aab2ce79183d094e0546084eea126f233bd7eca2de63f5d7eb5b65b4d316e\": container with ID starting with 186aab2ce79183d094e0546084eea126f233bd7eca2de63f5d7eb5b65b4d316e not found: ID does not exist" Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.684633 4665 scope.go:117] "RemoveContainer" containerID="ce1623b5cf4b330d2924a073816737fab840e13fb937f2c4c5845b9a0a2714c8" Dec 05 02:02:40 crc kubenswrapper[4665]: E1205 02:02:40.684921 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce1623b5cf4b330d2924a073816737fab840e13fb937f2c4c5845b9a0a2714c8\": container with ID starting with ce1623b5cf4b330d2924a073816737fab840e13fb937f2c4c5845b9a0a2714c8 not found: ID does not exist" containerID="ce1623b5cf4b330d2924a073816737fab840e13fb937f2c4c5845b9a0a2714c8" Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.684947 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce1623b5cf4b330d2924a073816737fab840e13fb937f2c4c5845b9a0a2714c8"} err="failed to get container status 
\"ce1623b5cf4b330d2924a073816737fab840e13fb937f2c4c5845b9a0a2714c8\": rpc error: code = NotFound desc = could not find container \"ce1623b5cf4b330d2924a073816737fab840e13fb937f2c4c5845b9a0a2714c8\": container with ID starting with ce1623b5cf4b330d2924a073816737fab840e13fb937f2c4c5845b9a0a2714c8 not found: ID does not exist" Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.893388 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224" Dec 05 02:02:40 crc kubenswrapper[4665]: E1205 02:02:40.893871 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:02:40 crc kubenswrapper[4665]: I1205 02:02:40.904794 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a4d715b7-45fb-4889-8b1a-f040fc6c1387" path="/var/lib/kubelet/pods/a4d715b7-45fb-4889-8b1a-f040fc6c1387/volumes" Dec 05 02:02:41 crc kubenswrapper[4665]: I1205 02:02:41.596348 4665 generic.go:334] "Generic (PLEG): container finished" podID="b75646e6-c710-4b48-b18e-5ec1100960be" containerID="1f34d903cff055de7b790161185656c06320fa47248d2b0c80254b9f056ad4de" exitCode=0 Dec 05 02:02:41 crc kubenswrapper[4665]: I1205 02:02:41.596391 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pbvx7" event={"ID":"b75646e6-c710-4b48-b18e-5ec1100960be","Type":"ContainerDied","Data":"1f34d903cff055de7b790161185656c06320fa47248d2b0c80254b9f056ad4de"} Dec 05 02:02:42 crc kubenswrapper[4665]: I1205 02:02:42.608065 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pbvx7" event={"ID":"b75646e6-c710-4b48-b18e-5ec1100960be","Type":"ContainerStarted","Data":"2d855f02d2022a637794b3e9ed2aa9ba72a4793c2b2e0c49a36594076b805959"} Dec 05 02:02:43 crc kubenswrapper[4665]: I1205 02:02:43.618408 4665 generic.go:334] "Generic (PLEG): container finished" podID="b75646e6-c710-4b48-b18e-5ec1100960be" containerID="2d855f02d2022a637794b3e9ed2aa9ba72a4793c2b2e0c49a36594076b805959" exitCode=0 Dec 05 02:02:43 crc kubenswrapper[4665]: I1205 02:02:43.618501 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pbvx7" event={"ID":"b75646e6-c710-4b48-b18e-5ec1100960be","Type":"ContainerDied","Data":"2d855f02d2022a637794b3e9ed2aa9ba72a4793c2b2e0c49a36594076b805959"} Dec 05 02:02:44 crc kubenswrapper[4665]: I1205 02:02:44.631360 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pbvx7" event={"ID":"b75646e6-c710-4b48-b18e-5ec1100960be","Type":"ContainerStarted","Data":"896b77ae7a49d46ac266880069e72296ca705473e54aa227ae3c9f4a7083a67d"} Dec 05 02:02:44 crc kubenswrapper[4665]: I1205 02:02:44.648819 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pbvx7" podStartSLOduration=3.207506442 podStartE2EDuration="5.648801341s" podCreationTimestamp="2025-12-05 02:02:39 +0000 UTC" firstStartedPulling="2025-12-05 02:02:41.598132478 +0000 UTC m=+3136.937524777" lastFinishedPulling="2025-12-05 02:02:44.039427377 +0000 UTC m=+3139.378819676" observedRunningTime="2025-12-05 
Dec 05 02:02:49 crc kubenswrapper[4665]: I1205 02:02:49.845632 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pbvx7"
Dec 05 02:02:49 crc kubenswrapper[4665]: I1205 02:02:49.846195 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pbvx7"
Dec 05 02:02:49 crc kubenswrapper[4665]: I1205 02:02:49.902480 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pbvx7"
Dec 05 02:02:50 crc kubenswrapper[4665]: I1205 02:02:50.757552 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pbvx7"
Dec 05 02:02:50 crc kubenswrapper[4665]: I1205 02:02:50.803243 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pbvx7"]
Dec 05 02:02:52 crc kubenswrapper[4665]: I1205 02:02:52.703735 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pbvx7" podUID="b75646e6-c710-4b48-b18e-5ec1100960be" containerName="registry-server" containerID="cri-o://896b77ae7a49d46ac266880069e72296ca705473e54aa227ae3c9f4a7083a67d" gracePeriod=2
Dec 05 02:02:53 crc kubenswrapper[4665]: I1205 02:02:53.159611 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pbvx7"
Dec 05 02:02:53 crc kubenswrapper[4665]: I1205 02:02:53.185087 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5cp7g\" (UniqueName: \"kubernetes.io/projected/b75646e6-c710-4b48-b18e-5ec1100960be-kube-api-access-5cp7g\") pod \"b75646e6-c710-4b48-b18e-5ec1100960be\" (UID: \"b75646e6-c710-4b48-b18e-5ec1100960be\") "
Dec 05 02:02:53 crc kubenswrapper[4665]: I1205 02:02:53.186485 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b75646e6-c710-4b48-b18e-5ec1100960be-catalog-content\") pod \"b75646e6-c710-4b48-b18e-5ec1100960be\" (UID: \"b75646e6-c710-4b48-b18e-5ec1100960be\") "
Dec 05 02:02:53 crc kubenswrapper[4665]: I1205 02:02:53.192771 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b75646e6-c710-4b48-b18e-5ec1100960be-kube-api-access-5cp7g" (OuterVolumeSpecName: "kube-api-access-5cp7g") pod "b75646e6-c710-4b48-b18e-5ec1100960be" (UID: "b75646e6-c710-4b48-b18e-5ec1100960be"). InnerVolumeSpecName "kube-api-access-5cp7g". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 02:02:53 crc kubenswrapper[4665]: I1205 02:02:53.212820 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b75646e6-c710-4b48-b18e-5ec1100960be-utilities\") pod \"b75646e6-c710-4b48-b18e-5ec1100960be\" (UID: \"b75646e6-c710-4b48-b18e-5ec1100960be\") "
Dec 05 02:02:53 crc kubenswrapper[4665]: I1205 02:02:53.213535 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b75646e6-c710-4b48-b18e-5ec1100960be-utilities" (OuterVolumeSpecName: "utilities") pod "b75646e6-c710-4b48-b18e-5ec1100960be" (UID: "b75646e6-c710-4b48-b18e-5ec1100960be"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:02:53 crc kubenswrapper[4665]: I1205 02:02:53.213633 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5cp7g\" (UniqueName: \"kubernetes.io/projected/b75646e6-c710-4b48-b18e-5ec1100960be-kube-api-access-5cp7g\") on node \"crc\" DevicePath \"\"" Dec 05 02:02:53 crc kubenswrapper[4665]: I1205 02:02:53.222084 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b75646e6-c710-4b48-b18e-5ec1100960be-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b75646e6-c710-4b48-b18e-5ec1100960be" (UID: "b75646e6-c710-4b48-b18e-5ec1100960be"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:02:53 crc kubenswrapper[4665]: I1205 02:02:53.314781 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b75646e6-c710-4b48-b18e-5ec1100960be-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 02:02:53 crc kubenswrapper[4665]: I1205 02:02:53.314811 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b75646e6-c710-4b48-b18e-5ec1100960be-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 02:02:53 crc kubenswrapper[4665]: I1205 02:02:53.716430 4665 generic.go:334] "Generic (PLEG): container finished" podID="b75646e6-c710-4b48-b18e-5ec1100960be" containerID="896b77ae7a49d46ac266880069e72296ca705473e54aa227ae3c9f4a7083a67d" exitCode=0 Dec 05 02:02:53 crc kubenswrapper[4665]: I1205 02:02:53.716476 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pbvx7" event={"ID":"b75646e6-c710-4b48-b18e-5ec1100960be","Type":"ContainerDied","Data":"896b77ae7a49d46ac266880069e72296ca705473e54aa227ae3c9f4a7083a67d"} Dec 05 02:02:53 crc kubenswrapper[4665]: I1205 02:02:53.716511 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pbvx7" Dec 05 02:02:53 crc kubenswrapper[4665]: I1205 02:02:53.716558 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pbvx7" event={"ID":"b75646e6-c710-4b48-b18e-5ec1100960be","Type":"ContainerDied","Data":"ac69650d8f327156452598881df0fe08cf62a683090ef00363a100b7cd086d64"} Dec 05 02:02:53 crc kubenswrapper[4665]: I1205 02:02:53.716591 4665 scope.go:117] "RemoveContainer" containerID="896b77ae7a49d46ac266880069e72296ca705473e54aa227ae3c9f4a7083a67d" Dec 05 02:02:53 crc kubenswrapper[4665]: I1205 02:02:53.754786 4665 scope.go:117] "RemoveContainer" containerID="2d855f02d2022a637794b3e9ed2aa9ba72a4793c2b2e0c49a36594076b805959" Dec 05 02:02:53 crc kubenswrapper[4665]: I1205 02:02:53.757352 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pbvx7"] Dec 05 02:02:53 crc kubenswrapper[4665]: I1205 02:02:53.764891 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pbvx7"] Dec 05 02:02:53 crc kubenswrapper[4665]: I1205 02:02:53.794830 4665 scope.go:117] "RemoveContainer" containerID="1f34d903cff055de7b790161185656c06320fa47248d2b0c80254b9f056ad4de" Dec 05 02:02:53 crc kubenswrapper[4665]: I1205 02:02:53.822716 4665 scope.go:117] "RemoveContainer" containerID="896b77ae7a49d46ac266880069e72296ca705473e54aa227ae3c9f4a7083a67d" Dec 05 02:02:53 crc kubenswrapper[4665]: E1205 02:02:53.824713 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"896b77ae7a49d46ac266880069e72296ca705473e54aa227ae3c9f4a7083a67d\": container with ID starting with 896b77ae7a49d46ac266880069e72296ca705473e54aa227ae3c9f4a7083a67d not found: ID does not exist" containerID="896b77ae7a49d46ac266880069e72296ca705473e54aa227ae3c9f4a7083a67d" Dec 05 02:02:53 crc kubenswrapper[4665]: I1205 02:02:53.824760 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"896b77ae7a49d46ac266880069e72296ca705473e54aa227ae3c9f4a7083a67d"} err="failed to get container status \"896b77ae7a49d46ac266880069e72296ca705473e54aa227ae3c9f4a7083a67d\": rpc error: code = NotFound desc = could not find container \"896b77ae7a49d46ac266880069e72296ca705473e54aa227ae3c9f4a7083a67d\": container with ID starting with 896b77ae7a49d46ac266880069e72296ca705473e54aa227ae3c9f4a7083a67d not found: ID does not exist" Dec 05 02:02:53 crc kubenswrapper[4665]: I1205 02:02:53.824790 4665 scope.go:117] "RemoveContainer" containerID="2d855f02d2022a637794b3e9ed2aa9ba72a4793c2b2e0c49a36594076b805959" Dec 05 02:02:53 crc kubenswrapper[4665]: E1205 02:02:53.825184 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d855f02d2022a637794b3e9ed2aa9ba72a4793c2b2e0c49a36594076b805959\": container with ID starting with 2d855f02d2022a637794b3e9ed2aa9ba72a4793c2b2e0c49a36594076b805959 not found: ID does not exist" containerID="2d855f02d2022a637794b3e9ed2aa9ba72a4793c2b2e0c49a36594076b805959" Dec 05 02:02:53 crc kubenswrapper[4665]: I1205 02:02:53.825212 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d855f02d2022a637794b3e9ed2aa9ba72a4793c2b2e0c49a36594076b805959"} err="failed to get container status \"2d855f02d2022a637794b3e9ed2aa9ba72a4793c2b2e0c49a36594076b805959\": rpc error: code = NotFound desc = could not find 
container \"2d855f02d2022a637794b3e9ed2aa9ba72a4793c2b2e0c49a36594076b805959\": container with ID starting with 2d855f02d2022a637794b3e9ed2aa9ba72a4793c2b2e0c49a36594076b805959 not found: ID does not exist" Dec 05 02:02:53 crc kubenswrapper[4665]: I1205 02:02:53.825230 4665 scope.go:117] "RemoveContainer" containerID="1f34d903cff055de7b790161185656c06320fa47248d2b0c80254b9f056ad4de" Dec 05 02:02:53 crc kubenswrapper[4665]: E1205 02:02:53.825577 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1f34d903cff055de7b790161185656c06320fa47248d2b0c80254b9f056ad4de\": container with ID starting with 1f34d903cff055de7b790161185656c06320fa47248d2b0c80254b9f056ad4de not found: ID does not exist" containerID="1f34d903cff055de7b790161185656c06320fa47248d2b0c80254b9f056ad4de" Dec 05 02:02:53 crc kubenswrapper[4665]: I1205 02:02:53.825639 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1f34d903cff055de7b790161185656c06320fa47248d2b0c80254b9f056ad4de"} err="failed to get container status \"1f34d903cff055de7b790161185656c06320fa47248d2b0c80254b9f056ad4de\": rpc error: code = NotFound desc = could not find container \"1f34d903cff055de7b790161185656c06320fa47248d2b0c80254b9f056ad4de\": container with ID starting with 1f34d903cff055de7b790161185656c06320fa47248d2b0c80254b9f056ad4de not found: ID does not exist" Dec 05 02:02:54 crc kubenswrapper[4665]: I1205 02:02:54.893439 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224" Dec 05 02:02:54 crc kubenswrapper[4665]: E1205 02:02:54.893784 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:02:54 crc kubenswrapper[4665]: I1205 02:02:54.907627 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b75646e6-c710-4b48-b18e-5ec1100960be" path="/var/lib/kubelet/pods/b75646e6-c710-4b48-b18e-5ec1100960be/volumes" Dec 05 02:03:08 crc kubenswrapper[4665]: I1205 02:03:08.894282 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224" Dec 05 02:03:08 crc kubenswrapper[4665]: E1205 02:03:08.895544 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:03:19 crc kubenswrapper[4665]: I1205 02:03:19.894173 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224" Dec 05 02:03:19 crc kubenswrapper[4665]: E1205 02:03:19.895159 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.827486 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"]
Dec 05 02:03:29 crc kubenswrapper[4665]: E1205 02:03:29.828416 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4d715b7-45fb-4889-8b1a-f040fc6c1387" containerName="extract-utilities"
Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.828432 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4d715b7-45fb-4889-8b1a-f040fc6c1387" containerName="extract-utilities"
Dec 05 02:03:29 crc kubenswrapper[4665]: E1205 02:03:29.828463 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b75646e6-c710-4b48-b18e-5ec1100960be" containerName="registry-server"
Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.828469 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="b75646e6-c710-4b48-b18e-5ec1100960be" containerName="registry-server"
Dec 05 02:03:29 crc kubenswrapper[4665]: E1205 02:03:29.828480 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b75646e6-c710-4b48-b18e-5ec1100960be" containerName="extract-utilities"
Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.828486 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="b75646e6-c710-4b48-b18e-5ec1100960be" containerName="extract-utilities"
Dec 05 02:03:29 crc kubenswrapper[4665]: E1205 02:03:29.828502 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4d715b7-45fb-4889-8b1a-f040fc6c1387" containerName="extract-content"
Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.828508 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4d715b7-45fb-4889-8b1a-f040fc6c1387" containerName="extract-content"
Dec 05 02:03:29 crc kubenswrapper[4665]: E1205 02:03:29.828518 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b75646e6-c710-4b48-b18e-5ec1100960be" containerName="extract-content"
Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.828524 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="b75646e6-c710-4b48-b18e-5ec1100960be" containerName="extract-content"
Dec 05 02:03:29 crc kubenswrapper[4665]: E1205 02:03:29.828538 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a4d715b7-45fb-4889-8b1a-f040fc6c1387" containerName="registry-server"
Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.828543 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="a4d715b7-45fb-4889-8b1a-f040fc6c1387" containerName="registry-server"
Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.828731 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="b75646e6-c710-4b48-b18e-5ec1100960be" containerName="registry-server"
Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.828743 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="a4d715b7-45fb-4889-8b1a-f040fc6c1387" containerName="registry-server"
Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.829388 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.831220 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.832225 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.832269 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.832719 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-6sdph" Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.838712 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a3dcea46-0cf1-441d-84ba-0b327c396844-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest" Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.838839 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a3dcea46-0cf1-441d-84ba-0b327c396844-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest" Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.838909 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a3dcea46-0cf1-441d-84ba-0b327c396844-config-data\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest" Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.840617 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.941465 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/a3dcea46-0cf1-441d-84ba-0b327c396844-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest" Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.941902 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest" Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.941945 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ccwvt\" (UniqueName: \"kubernetes.io/projected/a3dcea46-0cf1-441d-84ba-0b327c396844-kube-api-access-ccwvt\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest" Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.941997 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: 
\"kubernetes.io/secret/a3dcea46-0cf1-441d-84ba-0b327c396844-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest" Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.942077 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/a3dcea46-0cf1-441d-84ba-0b327c396844-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest" Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.942283 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a3dcea46-0cf1-441d-84ba-0b327c396844-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest" Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.942361 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a3dcea46-0cf1-441d-84ba-0b327c396844-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest" Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.942412 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a3dcea46-0cf1-441d-84ba-0b327c396844-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest" Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.942451 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a3dcea46-0cf1-441d-84ba-0b327c396844-config-data\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest" Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.943231 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a3dcea46-0cf1-441d-84ba-0b327c396844-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest" Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.944001 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a3dcea46-0cf1-441d-84ba-0b327c396844-config-data\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest" Dec 05 02:03:29 crc kubenswrapper[4665]: I1205 02:03:29.953876 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a3dcea46-0cf1-441d-84ba-0b327c396844-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest" Dec 05 02:03:30 crc kubenswrapper[4665]: I1205 02:03:30.043968 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/a3dcea46-0cf1-441d-84ba-0b327c396844-test-operator-ephemeral-workdir\") pod 
\"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest" Dec 05 02:03:30 crc kubenswrapper[4665]: I1205 02:03:30.044038 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest" Dec 05 02:03:30 crc kubenswrapper[4665]: I1205 02:03:30.044064 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ccwvt\" (UniqueName: \"kubernetes.io/projected/a3dcea46-0cf1-441d-84ba-0b327c396844-kube-api-access-ccwvt\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest" Dec 05 02:03:30 crc kubenswrapper[4665]: I1205 02:03:30.044099 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/a3dcea46-0cf1-441d-84ba-0b327c396844-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest" Dec 05 02:03:30 crc kubenswrapper[4665]: I1205 02:03:30.044156 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/a3dcea46-0cf1-441d-84ba-0b327c396844-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest" Dec 05 02:03:30 crc kubenswrapper[4665]: I1205 02:03:30.044196 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a3dcea46-0cf1-441d-84ba-0b327c396844-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest" Dec 05 02:03:30 crc kubenswrapper[4665]: I1205 02:03:30.044560 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/a3dcea46-0cf1-441d-84ba-0b327c396844-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest" Dec 05 02:03:30 crc kubenswrapper[4665]: I1205 02:03:30.044764 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/a3dcea46-0cf1-441d-84ba-0b327c396844-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest" Dec 05 02:03:30 crc kubenswrapper[4665]: I1205 02:03:30.045232 4665 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/tempest-tests-tempest" Dec 05 02:03:30 crc kubenswrapper[4665]: I1205 02:03:30.047972 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a3dcea46-0cf1-441d-84ba-0b327c396844-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest" Dec 05 02:03:30 crc 
Dec 05 02:03:30 crc kubenswrapper[4665]: I1205 02:03:30.064254 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ccwvt\" (UniqueName: \"kubernetes.io/projected/a3dcea46-0cf1-441d-84ba-0b327c396844-kube-api-access-ccwvt\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest"
Dec 05 02:03:30 crc kubenswrapper[4665]: I1205 02:03:30.083658 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"tempest-tests-tempest\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") " pod="openstack/tempest-tests-tempest"
Dec 05 02:03:30 crc kubenswrapper[4665]: I1205 02:03:30.151843 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Dec 05 02:03:30 crc kubenswrapper[4665]: I1205 02:03:30.614223 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"]
Dec 05 02:03:31 crc kubenswrapper[4665]: I1205 02:03:31.073882 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"a3dcea46-0cf1-441d-84ba-0b327c396844","Type":"ContainerStarted","Data":"6a7a4ca74103ec0386864df1bf305d5d63d2648735a2a89ba040c2779e71a22d"}
Dec 05 02:03:33 crc kubenswrapper[4665]: I1205 02:03:33.893288 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224"
Dec 05 02:03:33 crc kubenswrapper[4665]: E1205 02:03:33.893879 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14"
Dec 05 02:03:44 crc kubenswrapper[4665]: I1205 02:03:44.902555 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224"
Dec 05 02:03:44 crc kubenswrapper[4665]: E1205 02:03:44.903243 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14"
Dec 05 02:03:55 crc kubenswrapper[4665]: I1205 02:03:55.894205 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224"
Dec 05 02:03:55 crc kubenswrapper[4665]: E1205 02:03:55.895013 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14"
Dec 05 02:04:07 crc kubenswrapper[4665]: E1205 02:04:07.613275 4665 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified"
Dec 05 02:04:07 crc kubenswrapper[4665]: E1205 02:04:07.615808 4665 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ccwvt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(a3dcea46-0cf1-441d-84ba-0b327c396844): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError"
Dec 05 02:04:07 crc kubenswrapper[4665]: E1205 02:04:07.620399 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" podUID="a3dcea46-0cf1-441d-84ba-0b327c396844"
Dec 05 02:04:08 crc kubenswrapper[4665]: E1205 02:04:08.418773 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="a3dcea46-0cf1-441d-84ba-0b327c396844"
Dec 05 02:04:09 crc kubenswrapper[4665]: I1205 02:04:09.894648 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224"
Dec 05 02:04:09 crc kubenswrapper[4665]: E1205 02:04:09.895567 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14"
Dec 05 02:04:21 crc kubenswrapper[4665]: I1205 02:04:21.893443 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224"
Dec 05 02:04:21 crc kubenswrapper[4665]: E1205 02:04:21.894325 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14"
Dec 05 02:04:25 crc kubenswrapper[4665]: I1205 02:04:25.572125 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"a3dcea46-0cf1-441d-84ba-0b327c396844","Type":"ContainerStarted","Data":"e000742542e570cc0d15119403498d4ede5b0c740ca856a4e9675b72d86ca4d4"}
Dec 05 02:04:25 crc kubenswrapper[4665]: I1205 02:04:25.599825 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=3.868276918 podStartE2EDuration="57.599805497s" podCreationTimestamp="2025-12-05 02:03:28 +0000 UTC" firstStartedPulling="2025-12-05 02:03:30.628719953 +0000 UTC m=+3185.968112252" lastFinishedPulling="2025-12-05 02:04:24.360248532 +0000 UTC m=+3239.699640831" observedRunningTime="2025-12-05 02:04:25.592817698 +0000 UTC m=+3240.932210007" watchObservedRunningTime="2025-12-05 02:04:25.599805497 +0000 UTC m=+3240.939197806"
Dec 05 02:04:34 crc kubenswrapper[4665]: I1205 02:04:34.909988 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224"
Dec 05 02:04:34 crc kubenswrapper[4665]: E1205 02:04:34.910891 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14"
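The tempest image pull above fails once with ErrImagePull ("context canceled"), the next sync is short-circuited with ImagePullBackOff, and the pull eventually succeeds at 02:04:25, followed by ContainerStarted and a startup-latency entry showing a roughly 54s pull. A toy Go sketch of that two-state retry loop, with a fake pull and millisecond delays standing in for kubelet's real backoff values, which are not recorded in this log:

package main

import (
	"errors"
	"fmt"
	"time"
)

func main() {
	failures := 2
	pull := func() error { // stand-in for a CRI PullImage call
		if failures > 0 {
			failures--
			return errors.New("rpc error: code = Canceled desc = copying config: context canceled")
		}
		return nil
	}
	delay := 10 * time.Millisecond // toy stand-in for kubelet's seconds-scale backoff
	for attempt := 1; ; attempt++ {
		if err := pull(); err != nil {
			fmt.Printf("attempt %d: ErrImagePull: %v\n", attempt, err)
			fmt.Printf("attempt %d: ImagePullBackOff for %v\n", attempt, delay)
			time.Sleep(delay)
			delay *= 2
			continue
		}
		fmt.Printf("attempt %d: image pulled, container can start\n", attempt)
		return
	}
}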
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:04:48 crc kubenswrapper[4665]: I1205 02:04:48.893726 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224" Dec 05 02:04:48 crc kubenswrapper[4665]: E1205 02:04:48.894572 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:05:00 crc kubenswrapper[4665]: I1205 02:05:00.893736 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224" Dec 05 02:05:00 crc kubenswrapper[4665]: E1205 02:05:00.894644 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:05:13 crc kubenswrapper[4665]: I1205 02:05:13.894447 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224" Dec 05 02:05:13 crc kubenswrapper[4665]: E1205 02:05:13.895199 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:05:26 crc kubenswrapper[4665]: I1205 02:05:26.893580 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224" Dec 05 02:05:26 crc kubenswrapper[4665]: E1205 02:05:26.894251 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:05:38 crc kubenswrapper[4665]: I1205 02:05:38.894331 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224" Dec 05 02:05:38 crc kubenswrapper[4665]: E1205 02:05:38.895177 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
Dec 05 02:05:51 crc kubenswrapper[4665]: I1205 02:05:51.893491 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224"
Dec 05 02:05:51 crc kubenswrapper[4665]: E1205 02:05:51.894317 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14"
Dec 05 02:06:03 crc kubenswrapper[4665]: I1205 02:06:03.893711 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224"
Dec 05 02:06:03 crc kubenswrapper[4665]: E1205 02:06:03.894703 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14"
Dec 05 02:06:17 crc kubenswrapper[4665]: I1205 02:06:17.894219 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224"
Dec 05 02:06:17 crc kubenswrapper[4665]: E1205 02:06:17.895169 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14"
Dec 05 02:06:32 crc kubenswrapper[4665]: I1205 02:06:32.894306 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224"
Dec 05 02:06:32 crc kubenswrapper[4665]: E1205 02:06:32.894929 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14"
Dec 05 02:06:44 crc kubenswrapper[4665]: I1205 02:06:44.901591 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224"
Dec 05 02:06:44 crc kubenswrapper[4665]: E1205 02:06:44.905556 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14"
pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:06:59 crc kubenswrapper[4665]: I1205 02:06:59.695598 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-njzxb"] Dec 05 02:06:59 crc kubenswrapper[4665]: I1205 02:06:59.697892 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-njzxb" Dec 05 02:06:59 crc kubenswrapper[4665]: I1205 02:06:59.727845 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-njzxb"] Dec 05 02:06:59 crc kubenswrapper[4665]: I1205 02:06:59.745153 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58e8f7b0-8c1e-460c-9c61-25ec827bd405-utilities\") pod \"redhat-operators-njzxb\" (UID: \"58e8f7b0-8c1e-460c-9c61-25ec827bd405\") " pod="openshift-marketplace/redhat-operators-njzxb" Dec 05 02:06:59 crc kubenswrapper[4665]: I1205 02:06:59.745207 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lgc5b\" (UniqueName: \"kubernetes.io/projected/58e8f7b0-8c1e-460c-9c61-25ec827bd405-kube-api-access-lgc5b\") pod \"redhat-operators-njzxb\" (UID: \"58e8f7b0-8c1e-460c-9c61-25ec827bd405\") " pod="openshift-marketplace/redhat-operators-njzxb" Dec 05 02:06:59 crc kubenswrapper[4665]: I1205 02:06:59.745433 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58e8f7b0-8c1e-460c-9c61-25ec827bd405-catalog-content\") pod \"redhat-operators-njzxb\" (UID: \"58e8f7b0-8c1e-460c-9c61-25ec827bd405\") " pod="openshift-marketplace/redhat-operators-njzxb" Dec 05 02:06:59 crc kubenswrapper[4665]: I1205 02:06:59.846593 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58e8f7b0-8c1e-460c-9c61-25ec827bd405-catalog-content\") pod \"redhat-operators-njzxb\" (UID: \"58e8f7b0-8c1e-460c-9c61-25ec827bd405\") " pod="openshift-marketplace/redhat-operators-njzxb" Dec 05 02:06:59 crc kubenswrapper[4665]: I1205 02:06:59.846677 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58e8f7b0-8c1e-460c-9c61-25ec827bd405-utilities\") pod \"redhat-operators-njzxb\" (UID: \"58e8f7b0-8c1e-460c-9c61-25ec827bd405\") " pod="openshift-marketplace/redhat-operators-njzxb" Dec 05 02:06:59 crc kubenswrapper[4665]: I1205 02:06:59.846711 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lgc5b\" (UniqueName: \"kubernetes.io/projected/58e8f7b0-8c1e-460c-9c61-25ec827bd405-kube-api-access-lgc5b\") pod \"redhat-operators-njzxb\" (UID: \"58e8f7b0-8c1e-460c-9c61-25ec827bd405\") " pod="openshift-marketplace/redhat-operators-njzxb" Dec 05 02:06:59 crc kubenswrapper[4665]: I1205 02:06:59.847240 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58e8f7b0-8c1e-460c-9c61-25ec827bd405-catalog-content\") pod \"redhat-operators-njzxb\" (UID: \"58e8f7b0-8c1e-460c-9c61-25ec827bd405\") " pod="openshift-marketplace/redhat-operators-njzxb" Dec 05 02:06:59 crc kubenswrapper[4665]: I1205 02:06:59.847320 4665 operation_generator.go:637] "MountVolume.SetUp succeeded 
for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58e8f7b0-8c1e-460c-9c61-25ec827bd405-utilities\") pod \"redhat-operators-njzxb\" (UID: \"58e8f7b0-8c1e-460c-9c61-25ec827bd405\") " pod="openshift-marketplace/redhat-operators-njzxb" Dec 05 02:06:59 crc kubenswrapper[4665]: I1205 02:06:59.876441 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lgc5b\" (UniqueName: \"kubernetes.io/projected/58e8f7b0-8c1e-460c-9c61-25ec827bd405-kube-api-access-lgc5b\") pod \"redhat-operators-njzxb\" (UID: \"58e8f7b0-8c1e-460c-9c61-25ec827bd405\") " pod="openshift-marketplace/redhat-operators-njzxb" Dec 05 02:06:59 crc kubenswrapper[4665]: I1205 02:06:59.893192 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224" Dec 05 02:06:59 crc kubenswrapper[4665]: E1205 02:06:59.893563 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:07:00 crc kubenswrapper[4665]: I1205 02:07:00.023652 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-njzxb" Dec 05 02:07:00 crc kubenswrapper[4665]: I1205 02:07:00.701230 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-njzxb"] Dec 05 02:07:00 crc kubenswrapper[4665]: W1205 02:07:00.718180 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod58e8f7b0_8c1e_460c_9c61_25ec827bd405.slice/crio-1356c0e859fe4d713c224ae79d3cbfb9fb1264ba090b41aa66278a1fa1067b3f WatchSource:0}: Error finding container 1356c0e859fe4d713c224ae79d3cbfb9fb1264ba090b41aa66278a1fa1067b3f: Status 404 returned error can't find the container with id 1356c0e859fe4d713c224ae79d3cbfb9fb1264ba090b41aa66278a1fa1067b3f Dec 05 02:07:00 crc kubenswrapper[4665]: I1205 02:07:00.951250 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-njzxb" event={"ID":"58e8f7b0-8c1e-460c-9c61-25ec827bd405","Type":"ContainerStarted","Data":"03c8b1b288ad58f74ae6873213a2006ca9cc8bc05bde8093b7d2c0c160064714"} Dec 05 02:07:00 crc kubenswrapper[4665]: I1205 02:07:00.951527 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-njzxb" event={"ID":"58e8f7b0-8c1e-460c-9c61-25ec827bd405","Type":"ContainerStarted","Data":"1356c0e859fe4d713c224ae79d3cbfb9fb1264ba090b41aa66278a1fa1067b3f"} Dec 05 02:07:01 crc kubenswrapper[4665]: I1205 02:07:01.962042 4665 generic.go:334] "Generic (PLEG): container finished" podID="58e8f7b0-8c1e-460c-9c61-25ec827bd405" containerID="03c8b1b288ad58f74ae6873213a2006ca9cc8bc05bde8093b7d2c0c160064714" exitCode=0 Dec 05 02:07:01 crc kubenswrapper[4665]: I1205 02:07:01.962419 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-njzxb" event={"ID":"58e8f7b0-8c1e-460c-9c61-25ec827bd405","Type":"ContainerDied","Data":"03c8b1b288ad58f74ae6873213a2006ca9cc8bc05bde8093b7d2c0c160064714"} Dec 05 02:07:02 crc kubenswrapper[4665]: I1205 02:07:02.975335 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-njzxb" event={"ID":"58e8f7b0-8c1e-460c-9c61-25ec827bd405","Type":"ContainerStarted","Data":"df9795574d1feae462cf505d3da61d5d2aa5b31bb3f80be18a9728863f3b6589"} Dec 05 02:07:04 crc kubenswrapper[4665]: I1205 02:07:04.993640 4665 generic.go:334] "Generic (PLEG): container finished" podID="58e8f7b0-8c1e-460c-9c61-25ec827bd405" containerID="df9795574d1feae462cf505d3da61d5d2aa5b31bb3f80be18a9728863f3b6589" exitCode=0 Dec 05 02:07:04 crc kubenswrapper[4665]: I1205 02:07:04.993723 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-njzxb" event={"ID":"58e8f7b0-8c1e-460c-9c61-25ec827bd405","Type":"ContainerDied","Data":"df9795574d1feae462cf505d3da61d5d2aa5b31bb3f80be18a9728863f3b6589"} Dec 05 02:07:07 crc kubenswrapper[4665]: I1205 02:07:07.017009 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-njzxb" event={"ID":"58e8f7b0-8c1e-460c-9c61-25ec827bd405","Type":"ContainerStarted","Data":"75843c23998f049a8a247818be8f482170b57d04f9ee173282b3263c261504e3"} Dec 05 02:07:07 crc kubenswrapper[4665]: I1205 02:07:07.037855 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-njzxb" podStartSLOduration=3.475029833 podStartE2EDuration="8.037837633s" podCreationTimestamp="2025-12-05 02:06:59 +0000 UTC" firstStartedPulling="2025-12-05 02:07:00.952634271 +0000 UTC m=+3396.292026570" lastFinishedPulling="2025-12-05 02:07:05.515442071 +0000 UTC m=+3400.854834370" observedRunningTime="2025-12-05 02:07:07.033534739 +0000 UTC m=+3402.372927038" watchObservedRunningTime="2025-12-05 02:07:07.037837633 +0000 UTC m=+3402.377229932" Dec 05 02:07:10 crc kubenswrapper[4665]: I1205 02:07:10.024187 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-njzxb" Dec 05 02:07:10 crc kubenswrapper[4665]: I1205 02:07:10.026050 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-njzxb" Dec 05 02:07:11 crc kubenswrapper[4665]: I1205 02:07:11.088786 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-njzxb" podUID="58e8f7b0-8c1e-460c-9c61-25ec827bd405" containerName="registry-server" probeResult="failure" output=< Dec 05 02:07:11 crc kubenswrapper[4665]: timeout: failed to connect service ":50051" within 1s Dec 05 02:07:11 crc kubenswrapper[4665]: > Dec 05 02:07:11 crc kubenswrapper[4665]: I1205 02:07:11.893463 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224" Dec 05 02:07:11 crc kubenswrapper[4665]: E1205 02:07:11.893748 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:07:20 crc kubenswrapper[4665]: I1205 02:07:20.097252 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-njzxb" Dec 05 02:07:20 crc kubenswrapper[4665]: I1205 02:07:20.151088 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/redhat-operators-njzxb" Dec 05 02:07:21 crc kubenswrapper[4665]: I1205 02:07:21.776025 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-njzxb"] Dec 05 02:07:21 crc kubenswrapper[4665]: I1205 02:07:21.777615 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-njzxb" podUID="58e8f7b0-8c1e-460c-9c61-25ec827bd405" containerName="registry-server" containerID="cri-o://75843c23998f049a8a247818be8f482170b57d04f9ee173282b3263c261504e3" gracePeriod=2 Dec 05 02:07:22 crc kubenswrapper[4665]: I1205 02:07:22.167477 4665 generic.go:334] "Generic (PLEG): container finished" podID="58e8f7b0-8c1e-460c-9c61-25ec827bd405" containerID="75843c23998f049a8a247818be8f482170b57d04f9ee173282b3263c261504e3" exitCode=0 Dec 05 02:07:22 crc kubenswrapper[4665]: I1205 02:07:22.167793 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-njzxb" event={"ID":"58e8f7b0-8c1e-460c-9c61-25ec827bd405","Type":"ContainerDied","Data":"75843c23998f049a8a247818be8f482170b57d04f9ee173282b3263c261504e3"} Dec 05 02:07:22 crc kubenswrapper[4665]: I1205 02:07:22.421875 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-njzxb" Dec 05 02:07:22 crc kubenswrapper[4665]: I1205 02:07:22.518670 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58e8f7b0-8c1e-460c-9c61-25ec827bd405-utilities\") pod \"58e8f7b0-8c1e-460c-9c61-25ec827bd405\" (UID: \"58e8f7b0-8c1e-460c-9c61-25ec827bd405\") " Dec 05 02:07:22 crc kubenswrapper[4665]: I1205 02:07:22.518913 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58e8f7b0-8c1e-460c-9c61-25ec827bd405-catalog-content\") pod \"58e8f7b0-8c1e-460c-9c61-25ec827bd405\" (UID: \"58e8f7b0-8c1e-460c-9c61-25ec827bd405\") " Dec 05 02:07:22 crc kubenswrapper[4665]: I1205 02:07:22.518951 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lgc5b\" (UniqueName: \"kubernetes.io/projected/58e8f7b0-8c1e-460c-9c61-25ec827bd405-kube-api-access-lgc5b\") pod \"58e8f7b0-8c1e-460c-9c61-25ec827bd405\" (UID: \"58e8f7b0-8c1e-460c-9c61-25ec827bd405\") " Dec 05 02:07:22 crc kubenswrapper[4665]: I1205 02:07:22.519909 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58e8f7b0-8c1e-460c-9c61-25ec827bd405-utilities" (OuterVolumeSpecName: "utilities") pod "58e8f7b0-8c1e-460c-9c61-25ec827bd405" (UID: "58e8f7b0-8c1e-460c-9c61-25ec827bd405"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:07:22 crc kubenswrapper[4665]: I1205 02:07:22.548824 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58e8f7b0-8c1e-460c-9c61-25ec827bd405-kube-api-access-lgc5b" (OuterVolumeSpecName: "kube-api-access-lgc5b") pod "58e8f7b0-8c1e-460c-9c61-25ec827bd405" (UID: "58e8f7b0-8c1e-460c-9c61-25ec827bd405"). InnerVolumeSpecName "kube-api-access-lgc5b". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:07:22 crc kubenswrapper[4665]: I1205 02:07:22.621511 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lgc5b\" (UniqueName: \"kubernetes.io/projected/58e8f7b0-8c1e-460c-9c61-25ec827bd405-kube-api-access-lgc5b\") on node \"crc\" DevicePath \"\"" Dec 05 02:07:22 crc kubenswrapper[4665]: I1205 02:07:22.621549 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58e8f7b0-8c1e-460c-9c61-25ec827bd405-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 02:07:22 crc kubenswrapper[4665]: I1205 02:07:22.630086 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58e8f7b0-8c1e-460c-9c61-25ec827bd405-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "58e8f7b0-8c1e-460c-9c61-25ec827bd405" (UID: "58e8f7b0-8c1e-460c-9c61-25ec827bd405"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:07:22 crc kubenswrapper[4665]: I1205 02:07:22.723274 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58e8f7b0-8c1e-460c-9c61-25ec827bd405-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 02:07:23 crc kubenswrapper[4665]: I1205 02:07:23.177995 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-njzxb" event={"ID":"58e8f7b0-8c1e-460c-9c61-25ec827bd405","Type":"ContainerDied","Data":"1356c0e859fe4d713c224ae79d3cbfb9fb1264ba090b41aa66278a1fa1067b3f"} Dec 05 02:07:23 crc kubenswrapper[4665]: I1205 02:07:23.178041 4665 scope.go:117] "RemoveContainer" containerID="75843c23998f049a8a247818be8f482170b57d04f9ee173282b3263c261504e3" Dec 05 02:07:23 crc kubenswrapper[4665]: I1205 02:07:23.178209 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-njzxb" Dec 05 02:07:23 crc kubenswrapper[4665]: I1205 02:07:23.202279 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-njzxb"] Dec 05 02:07:23 crc kubenswrapper[4665]: I1205 02:07:23.207149 4665 scope.go:117] "RemoveContainer" containerID="df9795574d1feae462cf505d3da61d5d2aa5b31bb3f80be18a9728863f3b6589" Dec 05 02:07:23 crc kubenswrapper[4665]: I1205 02:07:23.216754 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-njzxb"] Dec 05 02:07:23 crc kubenswrapper[4665]: I1205 02:07:23.230117 4665 scope.go:117] "RemoveContainer" containerID="03c8b1b288ad58f74ae6873213a2006ca9cc8bc05bde8093b7d2c0c160064714" Dec 05 02:07:23 crc kubenswrapper[4665]: I1205 02:07:23.893811 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224" Dec 05 02:07:24 crc kubenswrapper[4665]: I1205 02:07:24.914970 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58e8f7b0-8c1e-460c-9c61-25ec827bd405" path="/var/lib/kubelet/pods/58e8f7b0-8c1e-460c-9c61-25ec827bd405/volumes" Dec 05 02:07:25 crc kubenswrapper[4665]: I1205 02:07:25.219328 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerStarted","Data":"9d031f70e8ccc70a0b8d1b637fb5ab62e2ead5a4a2c6250917b8cd69adbf52c6"} Dec 05 02:09:44 crc kubenswrapper[4665]: I1205 02:09:44.924063 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 02:09:44 crc kubenswrapper[4665]: I1205 02:09:44.924924 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 02:10:14 crc kubenswrapper[4665]: I1205 02:10:14.922732 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 02:10:14 crc kubenswrapper[4665]: I1205 02:10:14.923208 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 02:10:44 crc kubenswrapper[4665]: I1205 02:10:44.922086 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 02:10:44 crc kubenswrapper[4665]: I1205 02:10:44.922613 4665 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 02:10:44 crc kubenswrapper[4665]: I1205 02:10:44.922660 4665 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 02:10:44 crc kubenswrapper[4665]: I1205 02:10:44.923453 4665 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9d031f70e8ccc70a0b8d1b637fb5ab62e2ead5a4a2c6250917b8cd69adbf52c6"} pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 02:10:44 crc kubenswrapper[4665]: I1205 02:10:44.923508 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" containerID="cri-o://9d031f70e8ccc70a0b8d1b637fb5ab62e2ead5a4a2c6250917b8cd69adbf52c6" gracePeriod=600 Dec 05 02:10:45 crc kubenswrapper[4665]: I1205 02:10:45.914667 4665 generic.go:334] "Generic (PLEG): container finished" podID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerID="9d031f70e8ccc70a0b8d1b637fb5ab62e2ead5a4a2c6250917b8cd69adbf52c6" exitCode=0 Dec 05 02:10:45 crc kubenswrapper[4665]: I1205 02:10:45.915186 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerDied","Data":"9d031f70e8ccc70a0b8d1b637fb5ab62e2ead5a4a2c6250917b8cd69adbf52c6"} Dec 05 02:10:45 crc kubenswrapper[4665]: I1205 02:10:45.915585 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerStarted","Data":"35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6"} Dec 05 02:10:45 crc kubenswrapper[4665]: I1205 02:10:45.915949 4665 scope.go:117] "RemoveContainer" containerID="9454a13bc7bda9dc036de003001680a9bad1b1b1f792b30caff16c4e01bdd224" Dec 05 02:12:43 crc kubenswrapper[4665]: I1205 02:12:43.176515 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4l59n"] Dec 05 02:12:43 crc kubenswrapper[4665]: E1205 02:12:43.178095 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58e8f7b0-8c1e-460c-9c61-25ec827bd405" containerName="registry-server" Dec 05 02:12:43 crc kubenswrapper[4665]: I1205 02:12:43.178131 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="58e8f7b0-8c1e-460c-9c61-25ec827bd405" containerName="registry-server" Dec 05 02:12:43 crc kubenswrapper[4665]: E1205 02:12:43.178153 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58e8f7b0-8c1e-460c-9c61-25ec827bd405" containerName="extract-utilities" Dec 05 02:12:43 crc kubenswrapper[4665]: I1205 02:12:43.178164 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="58e8f7b0-8c1e-460c-9c61-25ec827bd405" containerName="extract-utilities" Dec 05 02:12:43 crc kubenswrapper[4665]: E1205 02:12:43.178198 4665 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="58e8f7b0-8c1e-460c-9c61-25ec827bd405" containerName="extract-content" Dec 05 02:12:43 crc kubenswrapper[4665]: I1205 02:12:43.178205 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="58e8f7b0-8c1e-460c-9c61-25ec827bd405" containerName="extract-content" Dec 05 02:12:43 crc kubenswrapper[4665]: I1205 02:12:43.178469 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="58e8f7b0-8c1e-460c-9c61-25ec827bd405" containerName="registry-server" Dec 05 02:12:43 crc kubenswrapper[4665]: I1205 02:12:43.180197 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4l59n" Dec 05 02:12:43 crc kubenswrapper[4665]: I1205 02:12:43.195868 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4l59n"] Dec 05 02:12:43 crc kubenswrapper[4665]: I1205 02:12:43.297720 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/635a0c2c-5bc7-4e82-9d51-915016ec3b50-catalog-content\") pod \"certified-operators-4l59n\" (UID: \"635a0c2c-5bc7-4e82-9d51-915016ec3b50\") " pod="openshift-marketplace/certified-operators-4l59n" Dec 05 02:12:43 crc kubenswrapper[4665]: I1205 02:12:43.297770 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/635a0c2c-5bc7-4e82-9d51-915016ec3b50-utilities\") pod \"certified-operators-4l59n\" (UID: \"635a0c2c-5bc7-4e82-9d51-915016ec3b50\") " pod="openshift-marketplace/certified-operators-4l59n" Dec 05 02:12:43 crc kubenswrapper[4665]: I1205 02:12:43.297822 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wc9c7\" (UniqueName: \"kubernetes.io/projected/635a0c2c-5bc7-4e82-9d51-915016ec3b50-kube-api-access-wc9c7\") pod \"certified-operators-4l59n\" (UID: \"635a0c2c-5bc7-4e82-9d51-915016ec3b50\") " pod="openshift-marketplace/certified-operators-4l59n" Dec 05 02:12:43 crc kubenswrapper[4665]: I1205 02:12:43.399346 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/635a0c2c-5bc7-4e82-9d51-915016ec3b50-catalog-content\") pod \"certified-operators-4l59n\" (UID: \"635a0c2c-5bc7-4e82-9d51-915016ec3b50\") " pod="openshift-marketplace/certified-operators-4l59n" Dec 05 02:12:43 crc kubenswrapper[4665]: I1205 02:12:43.399396 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/635a0c2c-5bc7-4e82-9d51-915016ec3b50-utilities\") pod \"certified-operators-4l59n\" (UID: \"635a0c2c-5bc7-4e82-9d51-915016ec3b50\") " pod="openshift-marketplace/certified-operators-4l59n" Dec 05 02:12:43 crc kubenswrapper[4665]: I1205 02:12:43.399443 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wc9c7\" (UniqueName: \"kubernetes.io/projected/635a0c2c-5bc7-4e82-9d51-915016ec3b50-kube-api-access-wc9c7\") pod \"certified-operators-4l59n\" (UID: \"635a0c2c-5bc7-4e82-9d51-915016ec3b50\") " pod="openshift-marketplace/certified-operators-4l59n" Dec 05 02:12:43 crc kubenswrapper[4665]: I1205 02:12:43.399912 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/635a0c2c-5bc7-4e82-9d51-915016ec3b50-utilities\") pod \"certified-operators-4l59n\" 
(UID: \"635a0c2c-5bc7-4e82-9d51-915016ec3b50\") " pod="openshift-marketplace/certified-operators-4l59n" Dec 05 02:12:43 crc kubenswrapper[4665]: I1205 02:12:43.400026 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/635a0c2c-5bc7-4e82-9d51-915016ec3b50-catalog-content\") pod \"certified-operators-4l59n\" (UID: \"635a0c2c-5bc7-4e82-9d51-915016ec3b50\") " pod="openshift-marketplace/certified-operators-4l59n" Dec 05 02:12:43 crc kubenswrapper[4665]: I1205 02:12:43.418960 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wc9c7\" (UniqueName: \"kubernetes.io/projected/635a0c2c-5bc7-4e82-9d51-915016ec3b50-kube-api-access-wc9c7\") pod \"certified-operators-4l59n\" (UID: \"635a0c2c-5bc7-4e82-9d51-915016ec3b50\") " pod="openshift-marketplace/certified-operators-4l59n" Dec 05 02:12:43 crc kubenswrapper[4665]: I1205 02:12:43.500870 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4l59n" Dec 05 02:12:44 crc kubenswrapper[4665]: I1205 02:12:44.012596 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4l59n"] Dec 05 02:12:45 crc kubenswrapper[4665]: I1205 02:12:45.006485 4665 generic.go:334] "Generic (PLEG): container finished" podID="635a0c2c-5bc7-4e82-9d51-915016ec3b50" containerID="84d4beae129b6fe6fe81105ebf80b5fe2929adc094c6eac3fde73be65ad3d1bb" exitCode=0 Dec 05 02:12:45 crc kubenswrapper[4665]: I1205 02:12:45.006795 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4l59n" event={"ID":"635a0c2c-5bc7-4e82-9d51-915016ec3b50","Type":"ContainerDied","Data":"84d4beae129b6fe6fe81105ebf80b5fe2929adc094c6eac3fde73be65ad3d1bb"} Dec 05 02:12:45 crc kubenswrapper[4665]: I1205 02:12:45.006831 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4l59n" event={"ID":"635a0c2c-5bc7-4e82-9d51-915016ec3b50","Type":"ContainerStarted","Data":"f744e130fa24089d05bb4ab30d5edb070d0fa5bdca538106d546154300c9302d"} Dec 05 02:12:45 crc kubenswrapper[4665]: I1205 02:12:45.009384 4665 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 02:12:46 crc kubenswrapper[4665]: I1205 02:12:46.022726 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4l59n" event={"ID":"635a0c2c-5bc7-4e82-9d51-915016ec3b50","Type":"ContainerStarted","Data":"8841ef98533789b3b5fafd446009c4d1909b953e7a7587199617c33b4b1b026d"} Dec 05 02:12:47 crc kubenswrapper[4665]: I1205 02:12:47.031608 4665 generic.go:334] "Generic (PLEG): container finished" podID="635a0c2c-5bc7-4e82-9d51-915016ec3b50" containerID="8841ef98533789b3b5fafd446009c4d1909b953e7a7587199617c33b4b1b026d" exitCode=0 Dec 05 02:12:47 crc kubenswrapper[4665]: I1205 02:12:47.031687 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4l59n" event={"ID":"635a0c2c-5bc7-4e82-9d51-915016ec3b50","Type":"ContainerDied","Data":"8841ef98533789b3b5fafd446009c4d1909b953e7a7587199617c33b4b1b026d"} Dec 05 02:12:48 crc kubenswrapper[4665]: I1205 02:12:48.042839 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4l59n" event={"ID":"635a0c2c-5bc7-4e82-9d51-915016ec3b50","Type":"ContainerStarted","Data":"6ab6c499ef863f15d70b078ec23c71d0ec0d8bdcc5d8bfdd2520e926492568ed"} Dec 
05 02:12:48 crc kubenswrapper[4665]: I1205 02:12:48.069120 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4l59n" podStartSLOduration=2.579041719 podStartE2EDuration="5.069104494s" podCreationTimestamp="2025-12-05 02:12:43 +0000 UTC" firstStartedPulling="2025-12-05 02:12:45.00910903 +0000 UTC m=+3740.348501329" lastFinishedPulling="2025-12-05 02:12:47.499171805 +0000 UTC m=+3742.838564104" observedRunningTime="2025-12-05 02:12:48.067905985 +0000 UTC m=+3743.407298274" watchObservedRunningTime="2025-12-05 02:12:48.069104494 +0000 UTC m=+3743.408496793" Dec 05 02:12:53 crc kubenswrapper[4665]: I1205 02:12:53.501787 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4l59n" Dec 05 02:12:53 crc kubenswrapper[4665]: I1205 02:12:53.502283 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4l59n" Dec 05 02:12:53 crc kubenswrapper[4665]: I1205 02:12:53.548632 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4l59n" Dec 05 02:12:54 crc kubenswrapper[4665]: I1205 02:12:54.148586 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4l59n" Dec 05 02:12:54 crc kubenswrapper[4665]: I1205 02:12:54.193983 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4l59n"] Dec 05 02:12:56 crc kubenswrapper[4665]: I1205 02:12:56.106506 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4l59n" podUID="635a0c2c-5bc7-4e82-9d51-915016ec3b50" containerName="registry-server" containerID="cri-o://6ab6c499ef863f15d70b078ec23c71d0ec0d8bdcc5d8bfdd2520e926492568ed" gracePeriod=2 Dec 05 02:12:56 crc kubenswrapper[4665]: I1205 02:12:56.773486 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4l59n" Dec 05 02:12:56 crc kubenswrapper[4665]: I1205 02:12:56.840135 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/635a0c2c-5bc7-4e82-9d51-915016ec3b50-catalog-content\") pod \"635a0c2c-5bc7-4e82-9d51-915016ec3b50\" (UID: \"635a0c2c-5bc7-4e82-9d51-915016ec3b50\") " Dec 05 02:12:56 crc kubenswrapper[4665]: I1205 02:12:56.840250 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/635a0c2c-5bc7-4e82-9d51-915016ec3b50-utilities\") pod \"635a0c2c-5bc7-4e82-9d51-915016ec3b50\" (UID: \"635a0c2c-5bc7-4e82-9d51-915016ec3b50\") " Dec 05 02:12:56 crc kubenswrapper[4665]: I1205 02:12:56.840356 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wc9c7\" (UniqueName: \"kubernetes.io/projected/635a0c2c-5bc7-4e82-9d51-915016ec3b50-kube-api-access-wc9c7\") pod \"635a0c2c-5bc7-4e82-9d51-915016ec3b50\" (UID: \"635a0c2c-5bc7-4e82-9d51-915016ec3b50\") " Dec 05 02:12:56 crc kubenswrapper[4665]: I1205 02:12:56.842344 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/635a0c2c-5bc7-4e82-9d51-915016ec3b50-utilities" (OuterVolumeSpecName: "utilities") pod "635a0c2c-5bc7-4e82-9d51-915016ec3b50" (UID: "635a0c2c-5bc7-4e82-9d51-915016ec3b50"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:12:56 crc kubenswrapper[4665]: I1205 02:12:56.847705 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/635a0c2c-5bc7-4e82-9d51-915016ec3b50-kube-api-access-wc9c7" (OuterVolumeSpecName: "kube-api-access-wc9c7") pod "635a0c2c-5bc7-4e82-9d51-915016ec3b50" (UID: "635a0c2c-5bc7-4e82-9d51-915016ec3b50"). InnerVolumeSpecName "kube-api-access-wc9c7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:12:56 crc kubenswrapper[4665]: I1205 02:12:56.926854 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/635a0c2c-5bc7-4e82-9d51-915016ec3b50-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "635a0c2c-5bc7-4e82-9d51-915016ec3b50" (UID: "635a0c2c-5bc7-4e82-9d51-915016ec3b50"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:12:56 crc kubenswrapper[4665]: I1205 02:12:56.943920 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wc9c7\" (UniqueName: \"kubernetes.io/projected/635a0c2c-5bc7-4e82-9d51-915016ec3b50-kube-api-access-wc9c7\") on node \"crc\" DevicePath \"\"" Dec 05 02:12:56 crc kubenswrapper[4665]: I1205 02:12:56.943953 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/635a0c2c-5bc7-4e82-9d51-915016ec3b50-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 02:12:56 crc kubenswrapper[4665]: I1205 02:12:56.943962 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/635a0c2c-5bc7-4e82-9d51-915016ec3b50-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 02:12:57 crc kubenswrapper[4665]: I1205 02:12:57.116691 4665 generic.go:334] "Generic (PLEG): container finished" podID="635a0c2c-5bc7-4e82-9d51-915016ec3b50" containerID="6ab6c499ef863f15d70b078ec23c71d0ec0d8bdcc5d8bfdd2520e926492568ed" exitCode=0 Dec 05 02:12:57 crc kubenswrapper[4665]: I1205 02:12:57.116735 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4l59n" event={"ID":"635a0c2c-5bc7-4e82-9d51-915016ec3b50","Type":"ContainerDied","Data":"6ab6c499ef863f15d70b078ec23c71d0ec0d8bdcc5d8bfdd2520e926492568ed"} Dec 05 02:12:57 crc kubenswrapper[4665]: I1205 02:12:57.116761 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4l59n" event={"ID":"635a0c2c-5bc7-4e82-9d51-915016ec3b50","Type":"ContainerDied","Data":"f744e130fa24089d05bb4ab30d5edb070d0fa5bdca538106d546154300c9302d"} Dec 05 02:12:57 crc kubenswrapper[4665]: I1205 02:12:57.116779 4665 scope.go:117] "RemoveContainer" containerID="6ab6c499ef863f15d70b078ec23c71d0ec0d8bdcc5d8bfdd2520e926492568ed" Dec 05 02:12:57 crc kubenswrapper[4665]: I1205 02:12:57.116800 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4l59n" Dec 05 02:12:57 crc kubenswrapper[4665]: I1205 02:12:57.144180 4665 scope.go:117] "RemoveContainer" containerID="8841ef98533789b3b5fafd446009c4d1909b953e7a7587199617c33b4b1b026d" Dec 05 02:12:57 crc kubenswrapper[4665]: I1205 02:12:57.156736 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4l59n"] Dec 05 02:12:57 crc kubenswrapper[4665]: I1205 02:12:57.170680 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-4l59n"] Dec 05 02:12:57 crc kubenswrapper[4665]: I1205 02:12:57.191180 4665 scope.go:117] "RemoveContainer" containerID="84d4beae129b6fe6fe81105ebf80b5fe2929adc094c6eac3fde73be65ad3d1bb" Dec 05 02:12:57 crc kubenswrapper[4665]: I1205 02:12:57.217791 4665 scope.go:117] "RemoveContainer" containerID="6ab6c499ef863f15d70b078ec23c71d0ec0d8bdcc5d8bfdd2520e926492568ed" Dec 05 02:12:57 crc kubenswrapper[4665]: E1205 02:12:57.218266 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6ab6c499ef863f15d70b078ec23c71d0ec0d8bdcc5d8bfdd2520e926492568ed\": container with ID starting with 6ab6c499ef863f15d70b078ec23c71d0ec0d8bdcc5d8bfdd2520e926492568ed not found: ID does not exist" containerID="6ab6c499ef863f15d70b078ec23c71d0ec0d8bdcc5d8bfdd2520e926492568ed" Dec 05 02:12:57 crc kubenswrapper[4665]: I1205 02:12:57.218412 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6ab6c499ef863f15d70b078ec23c71d0ec0d8bdcc5d8bfdd2520e926492568ed"} err="failed to get container status \"6ab6c499ef863f15d70b078ec23c71d0ec0d8bdcc5d8bfdd2520e926492568ed\": rpc error: code = NotFound desc = could not find container \"6ab6c499ef863f15d70b078ec23c71d0ec0d8bdcc5d8bfdd2520e926492568ed\": container with ID starting with 6ab6c499ef863f15d70b078ec23c71d0ec0d8bdcc5d8bfdd2520e926492568ed not found: ID does not exist" Dec 05 02:12:57 crc kubenswrapper[4665]: I1205 02:12:57.218556 4665 scope.go:117] "RemoveContainer" containerID="8841ef98533789b3b5fafd446009c4d1909b953e7a7587199617c33b4b1b026d" Dec 05 02:12:57 crc kubenswrapper[4665]: E1205 02:12:57.218996 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8841ef98533789b3b5fafd446009c4d1909b953e7a7587199617c33b4b1b026d\": container with ID starting with 8841ef98533789b3b5fafd446009c4d1909b953e7a7587199617c33b4b1b026d not found: ID does not exist" containerID="8841ef98533789b3b5fafd446009c4d1909b953e7a7587199617c33b4b1b026d" Dec 05 02:12:57 crc kubenswrapper[4665]: I1205 02:12:57.219045 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8841ef98533789b3b5fafd446009c4d1909b953e7a7587199617c33b4b1b026d"} err="failed to get container status \"8841ef98533789b3b5fafd446009c4d1909b953e7a7587199617c33b4b1b026d\": rpc error: code = NotFound desc = could not find container \"8841ef98533789b3b5fafd446009c4d1909b953e7a7587199617c33b4b1b026d\": container with ID starting with 8841ef98533789b3b5fafd446009c4d1909b953e7a7587199617c33b4b1b026d not found: ID does not exist" Dec 05 02:12:57 crc kubenswrapper[4665]: I1205 02:12:57.219063 4665 scope.go:117] "RemoveContainer" containerID="84d4beae129b6fe6fe81105ebf80b5fe2929adc094c6eac3fde73be65ad3d1bb" Dec 05 02:12:57 crc kubenswrapper[4665]: E1205 02:12:57.219347 4665 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"84d4beae129b6fe6fe81105ebf80b5fe2929adc094c6eac3fde73be65ad3d1bb\": container with ID starting with 84d4beae129b6fe6fe81105ebf80b5fe2929adc094c6eac3fde73be65ad3d1bb not found: ID does not exist" containerID="84d4beae129b6fe6fe81105ebf80b5fe2929adc094c6eac3fde73be65ad3d1bb" Dec 05 02:12:57 crc kubenswrapper[4665]: I1205 02:12:57.219377 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84d4beae129b6fe6fe81105ebf80b5fe2929adc094c6eac3fde73be65ad3d1bb"} err="failed to get container status \"84d4beae129b6fe6fe81105ebf80b5fe2929adc094c6eac3fde73be65ad3d1bb\": rpc error: code = NotFound desc = could not find container \"84d4beae129b6fe6fe81105ebf80b5fe2929adc094c6eac3fde73be65ad3d1bb\": container with ID starting with 84d4beae129b6fe6fe81105ebf80b5fe2929adc094c6eac3fde73be65ad3d1bb not found: ID does not exist" Dec 05 02:12:58 crc kubenswrapper[4665]: I1205 02:12:58.907890 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="635a0c2c-5bc7-4e82-9d51-915016ec3b50" path="/var/lib/kubelet/pods/635a0c2c-5bc7-4e82-9d51-915016ec3b50/volumes" Dec 05 02:12:59 crc kubenswrapper[4665]: I1205 02:12:59.192394 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-s29pn"] Dec 05 02:12:59 crc kubenswrapper[4665]: E1205 02:12:59.192790 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="635a0c2c-5bc7-4e82-9d51-915016ec3b50" containerName="extract-utilities" Dec 05 02:12:59 crc kubenswrapper[4665]: I1205 02:12:59.192806 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="635a0c2c-5bc7-4e82-9d51-915016ec3b50" containerName="extract-utilities" Dec 05 02:12:59 crc kubenswrapper[4665]: E1205 02:12:59.192830 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="635a0c2c-5bc7-4e82-9d51-915016ec3b50" containerName="extract-content" Dec 05 02:12:59 crc kubenswrapper[4665]: I1205 02:12:59.192837 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="635a0c2c-5bc7-4e82-9d51-915016ec3b50" containerName="extract-content" Dec 05 02:12:59 crc kubenswrapper[4665]: E1205 02:12:59.192855 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="635a0c2c-5bc7-4e82-9d51-915016ec3b50" containerName="registry-server" Dec 05 02:12:59 crc kubenswrapper[4665]: I1205 02:12:59.192861 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="635a0c2c-5bc7-4e82-9d51-915016ec3b50" containerName="registry-server" Dec 05 02:12:59 crc kubenswrapper[4665]: I1205 02:12:59.193055 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="635a0c2c-5bc7-4e82-9d51-915016ec3b50" containerName="registry-server" Dec 05 02:12:59 crc kubenswrapper[4665]: I1205 02:12:59.195172 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-s29pn" Dec 05 02:12:59 crc kubenswrapper[4665]: I1205 02:12:59.208042 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-s29pn"] Dec 05 02:12:59 crc kubenswrapper[4665]: I1205 02:12:59.307049 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tqjzp\" (UniqueName: \"kubernetes.io/projected/35c43353-c7e2-4e05-b0c9-2c36949af528-kube-api-access-tqjzp\") pod \"community-operators-s29pn\" (UID: \"35c43353-c7e2-4e05-b0c9-2c36949af528\") " pod="openshift-marketplace/community-operators-s29pn" Dec 05 02:12:59 crc kubenswrapper[4665]: I1205 02:12:59.307163 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35c43353-c7e2-4e05-b0c9-2c36949af528-utilities\") pod \"community-operators-s29pn\" (UID: \"35c43353-c7e2-4e05-b0c9-2c36949af528\") " pod="openshift-marketplace/community-operators-s29pn" Dec 05 02:12:59 crc kubenswrapper[4665]: I1205 02:12:59.308618 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35c43353-c7e2-4e05-b0c9-2c36949af528-catalog-content\") pod \"community-operators-s29pn\" (UID: \"35c43353-c7e2-4e05-b0c9-2c36949af528\") " pod="openshift-marketplace/community-operators-s29pn" Dec 05 02:12:59 crc kubenswrapper[4665]: I1205 02:12:59.410523 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tqjzp\" (UniqueName: \"kubernetes.io/projected/35c43353-c7e2-4e05-b0c9-2c36949af528-kube-api-access-tqjzp\") pod \"community-operators-s29pn\" (UID: \"35c43353-c7e2-4e05-b0c9-2c36949af528\") " pod="openshift-marketplace/community-operators-s29pn" Dec 05 02:12:59 crc kubenswrapper[4665]: I1205 02:12:59.410869 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35c43353-c7e2-4e05-b0c9-2c36949af528-utilities\") pod \"community-operators-s29pn\" (UID: \"35c43353-c7e2-4e05-b0c9-2c36949af528\") " pod="openshift-marketplace/community-operators-s29pn" Dec 05 02:12:59 crc kubenswrapper[4665]: I1205 02:12:59.410949 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35c43353-c7e2-4e05-b0c9-2c36949af528-catalog-content\") pod \"community-operators-s29pn\" (UID: \"35c43353-c7e2-4e05-b0c9-2c36949af528\") " pod="openshift-marketplace/community-operators-s29pn" Dec 05 02:12:59 crc kubenswrapper[4665]: I1205 02:12:59.411369 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35c43353-c7e2-4e05-b0c9-2c36949af528-utilities\") pod \"community-operators-s29pn\" (UID: \"35c43353-c7e2-4e05-b0c9-2c36949af528\") " pod="openshift-marketplace/community-operators-s29pn" Dec 05 02:12:59 crc kubenswrapper[4665]: I1205 02:12:59.411405 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35c43353-c7e2-4e05-b0c9-2c36949af528-catalog-content\") pod \"community-operators-s29pn\" (UID: \"35c43353-c7e2-4e05-b0c9-2c36949af528\") " pod="openshift-marketplace/community-operators-s29pn" Dec 05 02:12:59 crc kubenswrapper[4665]: I1205 02:12:59.429004 4665 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-tqjzp\" (UniqueName: \"kubernetes.io/projected/35c43353-c7e2-4e05-b0c9-2c36949af528-kube-api-access-tqjzp\") pod \"community-operators-s29pn\" (UID: \"35c43353-c7e2-4e05-b0c9-2c36949af528\") " pod="openshift-marketplace/community-operators-s29pn" Dec 05 02:12:59 crc kubenswrapper[4665]: I1205 02:12:59.518256 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s29pn" Dec 05 02:13:00 crc kubenswrapper[4665]: I1205 02:13:00.180940 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-s29pn"] Dec 05 02:13:01 crc kubenswrapper[4665]: I1205 02:13:01.155265 4665 generic.go:334] "Generic (PLEG): container finished" podID="35c43353-c7e2-4e05-b0c9-2c36949af528" containerID="9a0378ee4a83e21a9c7aaa08cb3c41fa92d1f2d77e7d5aba0b3c64b6814fd50a" exitCode=0 Dec 05 02:13:01 crc kubenswrapper[4665]: I1205 02:13:01.155773 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s29pn" event={"ID":"35c43353-c7e2-4e05-b0c9-2c36949af528","Type":"ContainerDied","Data":"9a0378ee4a83e21a9c7aaa08cb3c41fa92d1f2d77e7d5aba0b3c64b6814fd50a"} Dec 05 02:13:01 crc kubenswrapper[4665]: I1205 02:13:01.156479 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s29pn" event={"ID":"35c43353-c7e2-4e05-b0c9-2c36949af528","Type":"ContainerStarted","Data":"87b2ab11f72c49a7334dbeb4cbbeb7fcf1f0b8c033d7bdf7a57812f9eedde324"} Dec 05 02:13:03 crc kubenswrapper[4665]: I1205 02:13:03.175436 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s29pn" event={"ID":"35c43353-c7e2-4e05-b0c9-2c36949af528","Type":"ContainerStarted","Data":"d4cd62ea1528dc8d95bcb442c1987b7173719df14dc2624136f2608967622e2e"} Dec 05 02:13:04 crc kubenswrapper[4665]: I1205 02:13:04.185651 4665 generic.go:334] "Generic (PLEG): container finished" podID="35c43353-c7e2-4e05-b0c9-2c36949af528" containerID="d4cd62ea1528dc8d95bcb442c1987b7173719df14dc2624136f2608967622e2e" exitCode=0 Dec 05 02:13:04 crc kubenswrapper[4665]: I1205 02:13:04.185696 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s29pn" event={"ID":"35c43353-c7e2-4e05-b0c9-2c36949af528","Type":"ContainerDied","Data":"d4cd62ea1528dc8d95bcb442c1987b7173719df14dc2624136f2608967622e2e"} Dec 05 02:13:05 crc kubenswrapper[4665]: I1205 02:13:05.196278 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s29pn" event={"ID":"35c43353-c7e2-4e05-b0c9-2c36949af528","Type":"ContainerStarted","Data":"5fe2f52ed10e20fd37159c8608d6a4fda2e32cfa1fd5971c013127ab10b07778"} Dec 05 02:13:05 crc kubenswrapper[4665]: I1205 02:13:05.224278 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-s29pn" podStartSLOduration=2.770856916 podStartE2EDuration="6.224255115s" podCreationTimestamp="2025-12-05 02:12:59 +0000 UTC" firstStartedPulling="2025-12-05 02:13:01.157845376 +0000 UTC m=+3756.497237675" lastFinishedPulling="2025-12-05 02:13:04.611243575 +0000 UTC m=+3759.950635874" observedRunningTime="2025-12-05 02:13:05.21910609 +0000 UTC m=+3760.558498419" watchObservedRunningTime="2025-12-05 02:13:05.224255115 +0000 UTC m=+3760.563647414" Dec 05 02:13:09 crc kubenswrapper[4665]: I1205 02:13:09.519032 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="" pod="openshift-marketplace/community-operators-s29pn" Dec 05 02:13:09 crc kubenswrapper[4665]: I1205 02:13:09.519662 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-s29pn" Dec 05 02:13:09 crc kubenswrapper[4665]: I1205 02:13:09.567316 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-s29pn" Dec 05 02:13:10 crc kubenswrapper[4665]: I1205 02:13:10.287446 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-s29pn" Dec 05 02:13:10 crc kubenswrapper[4665]: I1205 02:13:10.342664 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-s29pn"] Dec 05 02:13:12 crc kubenswrapper[4665]: I1205 02:13:12.255049 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-s29pn" podUID="35c43353-c7e2-4e05-b0c9-2c36949af528" containerName="registry-server" containerID="cri-o://5fe2f52ed10e20fd37159c8608d6a4fda2e32cfa1fd5971c013127ab10b07778" gracePeriod=2 Dec 05 02:13:12 crc kubenswrapper[4665]: I1205 02:13:12.872987 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s29pn" Dec 05 02:13:12 crc kubenswrapper[4665]: I1205 02:13:12.985589 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35c43353-c7e2-4e05-b0c9-2c36949af528-utilities\") pod \"35c43353-c7e2-4e05-b0c9-2c36949af528\" (UID: \"35c43353-c7e2-4e05-b0c9-2c36949af528\") " Dec 05 02:13:12 crc kubenswrapper[4665]: I1205 02:13:12.986731 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35c43353-c7e2-4e05-b0c9-2c36949af528-catalog-content\") pod \"35c43353-c7e2-4e05-b0c9-2c36949af528\" (UID: \"35c43353-c7e2-4e05-b0c9-2c36949af528\") " Dec 05 02:13:12 crc kubenswrapper[4665]: I1205 02:13:12.986856 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tqjzp\" (UniqueName: \"kubernetes.io/projected/35c43353-c7e2-4e05-b0c9-2c36949af528-kube-api-access-tqjzp\") pod \"35c43353-c7e2-4e05-b0c9-2c36949af528\" (UID: \"35c43353-c7e2-4e05-b0c9-2c36949af528\") " Dec 05 02:13:13 crc kubenswrapper[4665]: I1205 02:13:12.986597 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35c43353-c7e2-4e05-b0c9-2c36949af528-utilities" (OuterVolumeSpecName: "utilities") pod "35c43353-c7e2-4e05-b0c9-2c36949af528" (UID: "35c43353-c7e2-4e05-b0c9-2c36949af528"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:13:13 crc kubenswrapper[4665]: I1205 02:13:12.999073 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35c43353-c7e2-4e05-b0c9-2c36949af528-kube-api-access-tqjzp" (OuterVolumeSpecName: "kube-api-access-tqjzp") pod "35c43353-c7e2-4e05-b0c9-2c36949af528" (UID: "35c43353-c7e2-4e05-b0c9-2c36949af528"). InnerVolumeSpecName "kube-api-access-tqjzp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:13:13 crc kubenswrapper[4665]: I1205 02:13:13.072424 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35c43353-c7e2-4e05-b0c9-2c36949af528-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "35c43353-c7e2-4e05-b0c9-2c36949af528" (UID: "35c43353-c7e2-4e05-b0c9-2c36949af528"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:13:13 crc kubenswrapper[4665]: I1205 02:13:13.088819 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35c43353-c7e2-4e05-b0c9-2c36949af528-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 02:13:13 crc kubenswrapper[4665]: I1205 02:13:13.088858 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35c43353-c7e2-4e05-b0c9-2c36949af528-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 02:13:13 crc kubenswrapper[4665]: I1205 02:13:13.088873 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tqjzp\" (UniqueName: \"kubernetes.io/projected/35c43353-c7e2-4e05-b0c9-2c36949af528-kube-api-access-tqjzp\") on node \"crc\" DevicePath \"\"" Dec 05 02:13:13 crc kubenswrapper[4665]: I1205 02:13:13.266993 4665 generic.go:334] "Generic (PLEG): container finished" podID="35c43353-c7e2-4e05-b0c9-2c36949af528" containerID="5fe2f52ed10e20fd37159c8608d6a4fda2e32cfa1fd5971c013127ab10b07778" exitCode=0 Dec 05 02:13:13 crc kubenswrapper[4665]: I1205 02:13:13.267083 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s29pn" Dec 05 02:13:13 crc kubenswrapper[4665]: I1205 02:13:13.267083 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s29pn" event={"ID":"35c43353-c7e2-4e05-b0c9-2c36949af528","Type":"ContainerDied","Data":"5fe2f52ed10e20fd37159c8608d6a4fda2e32cfa1fd5971c013127ab10b07778"} Dec 05 02:13:13 crc kubenswrapper[4665]: I1205 02:13:13.267416 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s29pn" event={"ID":"35c43353-c7e2-4e05-b0c9-2c36949af528","Type":"ContainerDied","Data":"87b2ab11f72c49a7334dbeb4cbbeb7fcf1f0b8c033d7bdf7a57812f9eedde324"} Dec 05 02:13:13 crc kubenswrapper[4665]: I1205 02:13:13.267440 4665 scope.go:117] "RemoveContainer" containerID="5fe2f52ed10e20fd37159c8608d6a4fda2e32cfa1fd5971c013127ab10b07778" Dec 05 02:13:13 crc kubenswrapper[4665]: I1205 02:13:13.300307 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-s29pn"] Dec 05 02:13:13 crc kubenswrapper[4665]: I1205 02:13:13.307833 4665 scope.go:117] "RemoveContainer" containerID="d4cd62ea1528dc8d95bcb442c1987b7173719df14dc2624136f2608967622e2e" Dec 05 02:13:13 crc kubenswrapper[4665]: I1205 02:13:13.309859 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-s29pn"] Dec 05 02:13:13 crc kubenswrapper[4665]: I1205 02:13:13.329761 4665 scope.go:117] "RemoveContainer" containerID="9a0378ee4a83e21a9c7aaa08cb3c41fa92d1f2d77e7d5aba0b3c64b6814fd50a" Dec 05 02:13:13 crc kubenswrapper[4665]: I1205 02:13:13.372400 4665 scope.go:117] "RemoveContainer" containerID="5fe2f52ed10e20fd37159c8608d6a4fda2e32cfa1fd5971c013127ab10b07778" Dec 05 02:13:13 crc kubenswrapper[4665]: E1205 02:13:13.372857 4665 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5fe2f52ed10e20fd37159c8608d6a4fda2e32cfa1fd5971c013127ab10b07778\": container with ID starting with 5fe2f52ed10e20fd37159c8608d6a4fda2e32cfa1fd5971c013127ab10b07778 not found: ID does not exist" containerID="5fe2f52ed10e20fd37159c8608d6a4fda2e32cfa1fd5971c013127ab10b07778" Dec 05 02:13:13 crc kubenswrapper[4665]: I1205 02:13:13.372898 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5fe2f52ed10e20fd37159c8608d6a4fda2e32cfa1fd5971c013127ab10b07778"} err="failed to get container status \"5fe2f52ed10e20fd37159c8608d6a4fda2e32cfa1fd5971c013127ab10b07778\": rpc error: code = NotFound desc = could not find container \"5fe2f52ed10e20fd37159c8608d6a4fda2e32cfa1fd5971c013127ab10b07778\": container with ID starting with 5fe2f52ed10e20fd37159c8608d6a4fda2e32cfa1fd5971c013127ab10b07778 not found: ID does not exist" Dec 05 02:13:13 crc kubenswrapper[4665]: I1205 02:13:13.372929 4665 scope.go:117] "RemoveContainer" containerID="d4cd62ea1528dc8d95bcb442c1987b7173719df14dc2624136f2608967622e2e" Dec 05 02:13:13 crc kubenswrapper[4665]: E1205 02:13:13.373702 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4cd62ea1528dc8d95bcb442c1987b7173719df14dc2624136f2608967622e2e\": container with ID starting with d4cd62ea1528dc8d95bcb442c1987b7173719df14dc2624136f2608967622e2e not found: ID does not exist" containerID="d4cd62ea1528dc8d95bcb442c1987b7173719df14dc2624136f2608967622e2e" Dec 05 02:13:13 crc kubenswrapper[4665]: I1205 02:13:13.373733 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4cd62ea1528dc8d95bcb442c1987b7173719df14dc2624136f2608967622e2e"} err="failed to get container status \"d4cd62ea1528dc8d95bcb442c1987b7173719df14dc2624136f2608967622e2e\": rpc error: code = NotFound desc = could not find container \"d4cd62ea1528dc8d95bcb442c1987b7173719df14dc2624136f2608967622e2e\": container with ID starting with d4cd62ea1528dc8d95bcb442c1987b7173719df14dc2624136f2608967622e2e not found: ID does not exist" Dec 05 02:13:13 crc kubenswrapper[4665]: I1205 02:13:13.373759 4665 scope.go:117] "RemoveContainer" containerID="9a0378ee4a83e21a9c7aaa08cb3c41fa92d1f2d77e7d5aba0b3c64b6814fd50a" Dec 05 02:13:13 crc kubenswrapper[4665]: E1205 02:13:13.374030 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a0378ee4a83e21a9c7aaa08cb3c41fa92d1f2d77e7d5aba0b3c64b6814fd50a\": container with ID starting with 9a0378ee4a83e21a9c7aaa08cb3c41fa92d1f2d77e7d5aba0b3c64b6814fd50a not found: ID does not exist" containerID="9a0378ee4a83e21a9c7aaa08cb3c41fa92d1f2d77e7d5aba0b3c64b6814fd50a" Dec 05 02:13:13 crc kubenswrapper[4665]: I1205 02:13:13.374073 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a0378ee4a83e21a9c7aaa08cb3c41fa92d1f2d77e7d5aba0b3c64b6814fd50a"} err="failed to get container status \"9a0378ee4a83e21a9c7aaa08cb3c41fa92d1f2d77e7d5aba0b3c64b6814fd50a\": rpc error: code = NotFound desc = could not find container \"9a0378ee4a83e21a9c7aaa08cb3c41fa92d1f2d77e7d5aba0b3c64b6814fd50a\": container with ID starting with 9a0378ee4a83e21a9c7aaa08cb3c41fa92d1f2d77e7d5aba0b3c64b6814fd50a not found: ID does not exist" Dec 05 02:13:14 crc kubenswrapper[4665]: I1205 02:13:14.905733 4665 kubelet_volumes.go:163] "Cleaned 
up orphaned pod volumes dir" podUID="35c43353-c7e2-4e05-b0c9-2c36949af528" path="/var/lib/kubelet/pods/35c43353-c7e2-4e05-b0c9-2c36949af528/volumes" Dec 05 02:13:14 crc kubenswrapper[4665]: I1205 02:13:14.921933 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 02:13:14 crc kubenswrapper[4665]: I1205 02:13:14.921987 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 02:13:18 crc kubenswrapper[4665]: I1205 02:13:18.481214 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wh72z"] Dec 05 02:13:18 crc kubenswrapper[4665]: E1205 02:13:18.481877 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35c43353-c7e2-4e05-b0c9-2c36949af528" containerName="extract-content" Dec 05 02:13:18 crc kubenswrapper[4665]: I1205 02:13:18.481890 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="35c43353-c7e2-4e05-b0c9-2c36949af528" containerName="extract-content" Dec 05 02:13:18 crc kubenswrapper[4665]: E1205 02:13:18.481920 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35c43353-c7e2-4e05-b0c9-2c36949af528" containerName="registry-server" Dec 05 02:13:18 crc kubenswrapper[4665]: I1205 02:13:18.481928 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="35c43353-c7e2-4e05-b0c9-2c36949af528" containerName="registry-server" Dec 05 02:13:18 crc kubenswrapper[4665]: E1205 02:13:18.481945 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35c43353-c7e2-4e05-b0c9-2c36949af528" containerName="extract-utilities" Dec 05 02:13:18 crc kubenswrapper[4665]: I1205 02:13:18.481952 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="35c43353-c7e2-4e05-b0c9-2c36949af528" containerName="extract-utilities" Dec 05 02:13:18 crc kubenswrapper[4665]: I1205 02:13:18.482131 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="35c43353-c7e2-4e05-b0c9-2c36949af528" containerName="registry-server" Dec 05 02:13:18 crc kubenswrapper[4665]: I1205 02:13:18.483486 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wh72z" Dec 05 02:13:18 crc kubenswrapper[4665]: I1205 02:13:18.497623 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wh72z"] Dec 05 02:13:18 crc kubenswrapper[4665]: I1205 02:13:18.598778 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jn8rd\" (UniqueName: \"kubernetes.io/projected/89636dc9-47f0-4382-ae3d-23505a49ad59-kube-api-access-jn8rd\") pod \"redhat-marketplace-wh72z\" (UID: \"89636dc9-47f0-4382-ae3d-23505a49ad59\") " pod="openshift-marketplace/redhat-marketplace-wh72z" Dec 05 02:13:18 crc kubenswrapper[4665]: I1205 02:13:18.598835 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89636dc9-47f0-4382-ae3d-23505a49ad59-catalog-content\") pod \"redhat-marketplace-wh72z\" (UID: \"89636dc9-47f0-4382-ae3d-23505a49ad59\") " pod="openshift-marketplace/redhat-marketplace-wh72z" Dec 05 02:13:18 crc kubenswrapper[4665]: I1205 02:13:18.598864 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89636dc9-47f0-4382-ae3d-23505a49ad59-utilities\") pod \"redhat-marketplace-wh72z\" (UID: \"89636dc9-47f0-4382-ae3d-23505a49ad59\") " pod="openshift-marketplace/redhat-marketplace-wh72z" Dec 05 02:13:18 crc kubenswrapper[4665]: I1205 02:13:18.700731 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89636dc9-47f0-4382-ae3d-23505a49ad59-catalog-content\") pod \"redhat-marketplace-wh72z\" (UID: \"89636dc9-47f0-4382-ae3d-23505a49ad59\") " pod="openshift-marketplace/redhat-marketplace-wh72z" Dec 05 02:13:18 crc kubenswrapper[4665]: I1205 02:13:18.700791 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89636dc9-47f0-4382-ae3d-23505a49ad59-utilities\") pod \"redhat-marketplace-wh72z\" (UID: \"89636dc9-47f0-4382-ae3d-23505a49ad59\") " pod="openshift-marketplace/redhat-marketplace-wh72z" Dec 05 02:13:18 crc kubenswrapper[4665]: I1205 02:13:18.700937 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jn8rd\" (UniqueName: \"kubernetes.io/projected/89636dc9-47f0-4382-ae3d-23505a49ad59-kube-api-access-jn8rd\") pod \"redhat-marketplace-wh72z\" (UID: \"89636dc9-47f0-4382-ae3d-23505a49ad59\") " pod="openshift-marketplace/redhat-marketplace-wh72z" Dec 05 02:13:18 crc kubenswrapper[4665]: I1205 02:13:18.701264 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89636dc9-47f0-4382-ae3d-23505a49ad59-catalog-content\") pod \"redhat-marketplace-wh72z\" (UID: \"89636dc9-47f0-4382-ae3d-23505a49ad59\") " pod="openshift-marketplace/redhat-marketplace-wh72z" Dec 05 02:13:18 crc kubenswrapper[4665]: I1205 02:13:18.701313 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89636dc9-47f0-4382-ae3d-23505a49ad59-utilities\") pod \"redhat-marketplace-wh72z\" (UID: \"89636dc9-47f0-4382-ae3d-23505a49ad59\") " pod="openshift-marketplace/redhat-marketplace-wh72z" Dec 05 02:13:18 crc kubenswrapper[4665]: I1205 02:13:18.734476 4665 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-jn8rd\" (UniqueName: \"kubernetes.io/projected/89636dc9-47f0-4382-ae3d-23505a49ad59-kube-api-access-jn8rd\") pod \"redhat-marketplace-wh72z\" (UID: \"89636dc9-47f0-4382-ae3d-23505a49ad59\") " pod="openshift-marketplace/redhat-marketplace-wh72z" Dec 05 02:13:18 crc kubenswrapper[4665]: I1205 02:13:18.801009 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wh72z" Dec 05 02:13:19 crc kubenswrapper[4665]: I1205 02:13:19.239022 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wh72z"] Dec 05 02:13:19 crc kubenswrapper[4665]: I1205 02:13:19.327608 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wh72z" event={"ID":"89636dc9-47f0-4382-ae3d-23505a49ad59","Type":"ContainerStarted","Data":"c1a30f6a34d85bf23ff5fa33a7c73e5dba474e1bbc0745912d849d06628d09a1"} Dec 05 02:13:20 crc kubenswrapper[4665]: I1205 02:13:20.338147 4665 generic.go:334] "Generic (PLEG): container finished" podID="89636dc9-47f0-4382-ae3d-23505a49ad59" containerID="873cdd00b03a596aea82de90b4581452e2d650fcab3f316a1b823a2ed7199e03" exitCode=0 Dec 05 02:13:20 crc kubenswrapper[4665]: I1205 02:13:20.338272 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wh72z" event={"ID":"89636dc9-47f0-4382-ae3d-23505a49ad59","Type":"ContainerDied","Data":"873cdd00b03a596aea82de90b4581452e2d650fcab3f316a1b823a2ed7199e03"} Dec 05 02:13:21 crc kubenswrapper[4665]: I1205 02:13:21.349578 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wh72z" event={"ID":"89636dc9-47f0-4382-ae3d-23505a49ad59","Type":"ContainerStarted","Data":"e211aa7bc555f7e26ee40b45665cd1beadd5d1fd13d97e8b88a360c454dfa1bf"} Dec 05 02:13:22 crc kubenswrapper[4665]: I1205 02:13:22.360071 4665 generic.go:334] "Generic (PLEG): container finished" podID="89636dc9-47f0-4382-ae3d-23505a49ad59" containerID="e211aa7bc555f7e26ee40b45665cd1beadd5d1fd13d97e8b88a360c454dfa1bf" exitCode=0 Dec 05 02:13:22 crc kubenswrapper[4665]: I1205 02:13:22.360151 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wh72z" event={"ID":"89636dc9-47f0-4382-ae3d-23505a49ad59","Type":"ContainerDied","Data":"e211aa7bc555f7e26ee40b45665cd1beadd5d1fd13d97e8b88a360c454dfa1bf"} Dec 05 02:13:23 crc kubenswrapper[4665]: I1205 02:13:23.371854 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wh72z" event={"ID":"89636dc9-47f0-4382-ae3d-23505a49ad59","Type":"ContainerStarted","Data":"4f1daae07bd9d890ad07ac585249ae7783bcc828507fe7a1fa1d0baad928b48a"} Dec 05 02:13:23 crc kubenswrapper[4665]: I1205 02:13:23.396093 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wh72z" podStartSLOduration=2.936918846 podStartE2EDuration="5.396071336s" podCreationTimestamp="2025-12-05 02:13:18 +0000 UTC" firstStartedPulling="2025-12-05 02:13:20.34100869 +0000 UTC m=+3775.680400989" lastFinishedPulling="2025-12-05 02:13:22.80016118 +0000 UTC m=+3778.139553479" observedRunningTime="2025-12-05 02:13:23.389033905 +0000 UTC m=+3778.728426204" watchObservedRunningTime="2025-12-05 02:13:23.396071336 +0000 UTC m=+3778.735463635" Dec 05 02:13:28 crc kubenswrapper[4665]: I1205 02:13:28.802016 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-marketplace-wh72z" Dec 05 02:13:28 crc kubenswrapper[4665]: I1205 02:13:28.802584 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wh72z" Dec 05 02:13:28 crc kubenswrapper[4665]: I1205 02:13:28.859694 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wh72z" Dec 05 02:13:29 crc kubenswrapper[4665]: I1205 02:13:29.472943 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wh72z" Dec 05 02:13:30 crc kubenswrapper[4665]: I1205 02:13:30.103092 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wh72z"] Dec 05 02:13:31 crc kubenswrapper[4665]: I1205 02:13:31.447148 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wh72z" podUID="89636dc9-47f0-4382-ae3d-23505a49ad59" containerName="registry-server" containerID="cri-o://4f1daae07bd9d890ad07ac585249ae7783bcc828507fe7a1fa1d0baad928b48a" gracePeriod=2 Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.170037 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wh72z" Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.282499 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89636dc9-47f0-4382-ae3d-23505a49ad59-utilities\") pod \"89636dc9-47f0-4382-ae3d-23505a49ad59\" (UID: \"89636dc9-47f0-4382-ae3d-23505a49ad59\") " Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.282574 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89636dc9-47f0-4382-ae3d-23505a49ad59-catalog-content\") pod \"89636dc9-47f0-4382-ae3d-23505a49ad59\" (UID: \"89636dc9-47f0-4382-ae3d-23505a49ad59\") " Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.282678 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jn8rd\" (UniqueName: \"kubernetes.io/projected/89636dc9-47f0-4382-ae3d-23505a49ad59-kube-api-access-jn8rd\") pod \"89636dc9-47f0-4382-ae3d-23505a49ad59\" (UID: \"89636dc9-47f0-4382-ae3d-23505a49ad59\") " Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.283603 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89636dc9-47f0-4382-ae3d-23505a49ad59-utilities" (OuterVolumeSpecName: "utilities") pod "89636dc9-47f0-4382-ae3d-23505a49ad59" (UID: "89636dc9-47f0-4382-ae3d-23505a49ad59"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.301140 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/89636dc9-47f0-4382-ae3d-23505a49ad59-kube-api-access-jn8rd" (OuterVolumeSpecName: "kube-api-access-jn8rd") pod "89636dc9-47f0-4382-ae3d-23505a49ad59" (UID: "89636dc9-47f0-4382-ae3d-23505a49ad59"). InnerVolumeSpecName "kube-api-access-jn8rd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.302841 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/89636dc9-47f0-4382-ae3d-23505a49ad59-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "89636dc9-47f0-4382-ae3d-23505a49ad59" (UID: "89636dc9-47f0-4382-ae3d-23505a49ad59"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.384289 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/89636dc9-47f0-4382-ae3d-23505a49ad59-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.384334 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/89636dc9-47f0-4382-ae3d-23505a49ad59-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.384348 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jn8rd\" (UniqueName: \"kubernetes.io/projected/89636dc9-47f0-4382-ae3d-23505a49ad59-kube-api-access-jn8rd\") on node \"crc\" DevicePath \"\"" Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.455386 4665 generic.go:334] "Generic (PLEG): container finished" podID="89636dc9-47f0-4382-ae3d-23505a49ad59" containerID="4f1daae07bd9d890ad07ac585249ae7783bcc828507fe7a1fa1d0baad928b48a" exitCode=0 Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.455423 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wh72z" event={"ID":"89636dc9-47f0-4382-ae3d-23505a49ad59","Type":"ContainerDied","Data":"4f1daae07bd9d890ad07ac585249ae7783bcc828507fe7a1fa1d0baad928b48a"} Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.455454 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wh72z" event={"ID":"89636dc9-47f0-4382-ae3d-23505a49ad59","Type":"ContainerDied","Data":"c1a30f6a34d85bf23ff5fa33a7c73e5dba474e1bbc0745912d849d06628d09a1"} Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.455472 4665 scope.go:117] "RemoveContainer" containerID="4f1daae07bd9d890ad07ac585249ae7783bcc828507fe7a1fa1d0baad928b48a" Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.455476 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wh72z" Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.506359 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wh72z"] Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.513471 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wh72z"] Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.515806 4665 scope.go:117] "RemoveContainer" containerID="e211aa7bc555f7e26ee40b45665cd1beadd5d1fd13d97e8b88a360c454dfa1bf" Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.544381 4665 scope.go:117] "RemoveContainer" containerID="873cdd00b03a596aea82de90b4581452e2d650fcab3f316a1b823a2ed7199e03" Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.585681 4665 scope.go:117] "RemoveContainer" containerID="4f1daae07bd9d890ad07ac585249ae7783bcc828507fe7a1fa1d0baad928b48a" Dec 05 02:13:32 crc kubenswrapper[4665]: E1205 02:13:32.586209 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4f1daae07bd9d890ad07ac585249ae7783bcc828507fe7a1fa1d0baad928b48a\": container with ID starting with 4f1daae07bd9d890ad07ac585249ae7783bcc828507fe7a1fa1d0baad928b48a not found: ID does not exist" containerID="4f1daae07bd9d890ad07ac585249ae7783bcc828507fe7a1fa1d0baad928b48a" Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.586241 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f1daae07bd9d890ad07ac585249ae7783bcc828507fe7a1fa1d0baad928b48a"} err="failed to get container status \"4f1daae07bd9d890ad07ac585249ae7783bcc828507fe7a1fa1d0baad928b48a\": rpc error: code = NotFound desc = could not find container \"4f1daae07bd9d890ad07ac585249ae7783bcc828507fe7a1fa1d0baad928b48a\": container with ID starting with 4f1daae07bd9d890ad07ac585249ae7783bcc828507fe7a1fa1d0baad928b48a not found: ID does not exist" Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.586263 4665 scope.go:117] "RemoveContainer" containerID="e211aa7bc555f7e26ee40b45665cd1beadd5d1fd13d97e8b88a360c454dfa1bf" Dec 05 02:13:32 crc kubenswrapper[4665]: E1205 02:13:32.586573 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e211aa7bc555f7e26ee40b45665cd1beadd5d1fd13d97e8b88a360c454dfa1bf\": container with ID starting with e211aa7bc555f7e26ee40b45665cd1beadd5d1fd13d97e8b88a360c454dfa1bf not found: ID does not exist" containerID="e211aa7bc555f7e26ee40b45665cd1beadd5d1fd13d97e8b88a360c454dfa1bf" Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.586593 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e211aa7bc555f7e26ee40b45665cd1beadd5d1fd13d97e8b88a360c454dfa1bf"} err="failed to get container status \"e211aa7bc555f7e26ee40b45665cd1beadd5d1fd13d97e8b88a360c454dfa1bf\": rpc error: code = NotFound desc = could not find container \"e211aa7bc555f7e26ee40b45665cd1beadd5d1fd13d97e8b88a360c454dfa1bf\": container with ID starting with e211aa7bc555f7e26ee40b45665cd1beadd5d1fd13d97e8b88a360c454dfa1bf not found: ID does not exist" Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.586607 4665 scope.go:117] "RemoveContainer" containerID="873cdd00b03a596aea82de90b4581452e2d650fcab3f316a1b823a2ed7199e03" Dec 05 02:13:32 crc kubenswrapper[4665]: E1205 02:13:32.587099 4665 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"873cdd00b03a596aea82de90b4581452e2d650fcab3f316a1b823a2ed7199e03\": container with ID starting with 873cdd00b03a596aea82de90b4581452e2d650fcab3f316a1b823a2ed7199e03 not found: ID does not exist" containerID="873cdd00b03a596aea82de90b4581452e2d650fcab3f316a1b823a2ed7199e03" Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.587138 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"873cdd00b03a596aea82de90b4581452e2d650fcab3f316a1b823a2ed7199e03"} err="failed to get container status \"873cdd00b03a596aea82de90b4581452e2d650fcab3f316a1b823a2ed7199e03\": rpc error: code = NotFound desc = could not find container \"873cdd00b03a596aea82de90b4581452e2d650fcab3f316a1b823a2ed7199e03\": container with ID starting with 873cdd00b03a596aea82de90b4581452e2d650fcab3f316a1b823a2ed7199e03 not found: ID does not exist" Dec 05 02:13:32 crc kubenswrapper[4665]: I1205 02:13:32.903494 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="89636dc9-47f0-4382-ae3d-23505a49ad59" path="/var/lib/kubelet/pods/89636dc9-47f0-4382-ae3d-23505a49ad59/volumes" Dec 05 02:13:44 crc kubenswrapper[4665]: I1205 02:13:44.922992 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 02:13:44 crc kubenswrapper[4665]: I1205 02:13:44.923654 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 02:14:14 crc kubenswrapper[4665]: I1205 02:14:14.922009 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 02:14:14 crc kubenswrapper[4665]: I1205 02:14:14.922576 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 02:14:14 crc kubenswrapper[4665]: I1205 02:14:14.922626 4665 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 02:14:14 crc kubenswrapper[4665]: I1205 02:14:14.923489 4665 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6"} pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 02:14:14 crc kubenswrapper[4665]: I1205 02:14:14.923544 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" 
podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" containerID="cri-o://35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" gracePeriod=600 Dec 05 02:14:15 crc kubenswrapper[4665]: E1205 02:14:15.050114 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:14:15 crc kubenswrapper[4665]: I1205 02:14:15.838423 4665 generic.go:334] "Generic (PLEG): container finished" podID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" exitCode=0 Dec 05 02:14:15 crc kubenswrapper[4665]: I1205 02:14:15.838473 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerDied","Data":"35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6"} Dec 05 02:14:15 crc kubenswrapper[4665]: I1205 02:14:15.838809 4665 scope.go:117] "RemoveContainer" containerID="9d031f70e8ccc70a0b8d1b637fb5ab62e2ead5a4a2c6250917b8cd69adbf52c6" Dec 05 02:14:15 crc kubenswrapper[4665]: I1205 02:14:15.839820 4665 scope.go:117] "RemoveContainer" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" Dec 05 02:14:15 crc kubenswrapper[4665]: E1205 02:14:15.840176 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:14:30 crc kubenswrapper[4665]: I1205 02:14:30.894510 4665 scope.go:117] "RemoveContainer" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" Dec 05 02:14:30 crc kubenswrapper[4665]: E1205 02:14:30.897049 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:14:43 crc kubenswrapper[4665]: I1205 02:14:43.893651 4665 scope.go:117] "RemoveContainer" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" Dec 05 02:14:43 crc kubenswrapper[4665]: E1205 02:14:43.894373 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:14:58 crc kubenswrapper[4665]: I1205 02:14:58.894267 4665 scope.go:117] 
"RemoveContainer" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" Dec 05 02:14:58 crc kubenswrapper[4665]: E1205 02:14:58.895054 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:15:00 crc kubenswrapper[4665]: I1205 02:15:00.198192 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415015-sh8p7"] Dec 05 02:15:00 crc kubenswrapper[4665]: E1205 02:15:00.198915 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89636dc9-47f0-4382-ae3d-23505a49ad59" containerName="extract-content" Dec 05 02:15:00 crc kubenswrapper[4665]: I1205 02:15:00.198928 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="89636dc9-47f0-4382-ae3d-23505a49ad59" containerName="extract-content" Dec 05 02:15:00 crc kubenswrapper[4665]: E1205 02:15:00.198938 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89636dc9-47f0-4382-ae3d-23505a49ad59" containerName="extract-utilities" Dec 05 02:15:00 crc kubenswrapper[4665]: I1205 02:15:00.198944 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="89636dc9-47f0-4382-ae3d-23505a49ad59" containerName="extract-utilities" Dec 05 02:15:00 crc kubenswrapper[4665]: E1205 02:15:00.198970 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="89636dc9-47f0-4382-ae3d-23505a49ad59" containerName="registry-server" Dec 05 02:15:00 crc kubenswrapper[4665]: I1205 02:15:00.198979 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="89636dc9-47f0-4382-ae3d-23505a49ad59" containerName="registry-server" Dec 05 02:15:00 crc kubenswrapper[4665]: I1205 02:15:00.199168 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="89636dc9-47f0-4382-ae3d-23505a49ad59" containerName="registry-server" Dec 05 02:15:00 crc kubenswrapper[4665]: I1205 02:15:00.200037 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415015-sh8p7" Dec 05 02:15:00 crc kubenswrapper[4665]: I1205 02:15:00.205829 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 02:15:00 crc kubenswrapper[4665]: I1205 02:15:00.207045 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 02:15:00 crc kubenswrapper[4665]: I1205 02:15:00.235039 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415015-sh8p7"] Dec 05 02:15:00 crc kubenswrapper[4665]: I1205 02:15:00.352346 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/38603037-5322-4862-b028-08abd6d7c8f1-secret-volume\") pod \"collect-profiles-29415015-sh8p7\" (UID: \"38603037-5322-4862-b028-08abd6d7c8f1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415015-sh8p7" Dec 05 02:15:00 crc kubenswrapper[4665]: I1205 02:15:00.352422 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/38603037-5322-4862-b028-08abd6d7c8f1-config-volume\") pod \"collect-profiles-29415015-sh8p7\" (UID: \"38603037-5322-4862-b028-08abd6d7c8f1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415015-sh8p7" Dec 05 02:15:00 crc kubenswrapper[4665]: I1205 02:15:00.352472 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rs5nk\" (UniqueName: \"kubernetes.io/projected/38603037-5322-4862-b028-08abd6d7c8f1-kube-api-access-rs5nk\") pod \"collect-profiles-29415015-sh8p7\" (UID: \"38603037-5322-4862-b028-08abd6d7c8f1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415015-sh8p7" Dec 05 02:15:00 crc kubenswrapper[4665]: I1205 02:15:00.453868 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/38603037-5322-4862-b028-08abd6d7c8f1-secret-volume\") pod \"collect-profiles-29415015-sh8p7\" (UID: \"38603037-5322-4862-b028-08abd6d7c8f1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415015-sh8p7" Dec 05 02:15:00 crc kubenswrapper[4665]: I1205 02:15:00.453940 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/38603037-5322-4862-b028-08abd6d7c8f1-config-volume\") pod \"collect-profiles-29415015-sh8p7\" (UID: \"38603037-5322-4862-b028-08abd6d7c8f1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415015-sh8p7" Dec 05 02:15:00 crc kubenswrapper[4665]: I1205 02:15:00.454004 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rs5nk\" (UniqueName: \"kubernetes.io/projected/38603037-5322-4862-b028-08abd6d7c8f1-kube-api-access-rs5nk\") pod \"collect-profiles-29415015-sh8p7\" (UID: \"38603037-5322-4862-b028-08abd6d7c8f1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415015-sh8p7" Dec 05 02:15:00 crc kubenswrapper[4665]: I1205 02:15:00.455104 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/38603037-5322-4862-b028-08abd6d7c8f1-config-volume\") pod 
\"collect-profiles-29415015-sh8p7\" (UID: \"38603037-5322-4862-b028-08abd6d7c8f1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415015-sh8p7" Dec 05 02:15:00 crc kubenswrapper[4665]: I1205 02:15:00.472909 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rs5nk\" (UniqueName: \"kubernetes.io/projected/38603037-5322-4862-b028-08abd6d7c8f1-kube-api-access-rs5nk\") pod \"collect-profiles-29415015-sh8p7\" (UID: \"38603037-5322-4862-b028-08abd6d7c8f1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415015-sh8p7" Dec 05 02:15:00 crc kubenswrapper[4665]: I1205 02:15:00.473078 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/38603037-5322-4862-b028-08abd6d7c8f1-secret-volume\") pod \"collect-profiles-29415015-sh8p7\" (UID: \"38603037-5322-4862-b028-08abd6d7c8f1\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415015-sh8p7" Dec 05 02:15:00 crc kubenswrapper[4665]: I1205 02:15:00.518585 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415015-sh8p7" Dec 05 02:15:01 crc kubenswrapper[4665]: I1205 02:15:01.003988 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415015-sh8p7"] Dec 05 02:15:01 crc kubenswrapper[4665]: I1205 02:15:01.254584 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415015-sh8p7" event={"ID":"38603037-5322-4862-b028-08abd6d7c8f1","Type":"ContainerStarted","Data":"7578e35472bd994eabf54bb2a3cfa070bd3809764f461ee5c462492c8516cd45"} Dec 05 02:15:01 crc kubenswrapper[4665]: I1205 02:15:01.254927 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415015-sh8p7" event={"ID":"38603037-5322-4862-b028-08abd6d7c8f1","Type":"ContainerStarted","Data":"3b23ea265d5e8a5b8c12510ea3d3064dab73e3777778ba61da4aeb56d802428a"} Dec 05 02:15:01 crc kubenswrapper[4665]: I1205 02:15:01.271239 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415015-sh8p7" podStartSLOduration=1.271222645 podStartE2EDuration="1.271222645s" podCreationTimestamp="2025-12-05 02:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 02:15:01.269634267 +0000 UTC m=+3876.609026566" watchObservedRunningTime="2025-12-05 02:15:01.271222645 +0000 UTC m=+3876.610614944" Dec 05 02:15:02 crc kubenswrapper[4665]: I1205 02:15:02.265478 4665 generic.go:334] "Generic (PLEG): container finished" podID="38603037-5322-4862-b028-08abd6d7c8f1" containerID="7578e35472bd994eabf54bb2a3cfa070bd3809764f461ee5c462492c8516cd45" exitCode=0 Dec 05 02:15:02 crc kubenswrapper[4665]: I1205 02:15:02.265666 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415015-sh8p7" event={"ID":"38603037-5322-4862-b028-08abd6d7c8f1","Type":"ContainerDied","Data":"7578e35472bd994eabf54bb2a3cfa070bd3809764f461ee5c462492c8516cd45"} Dec 05 02:15:03 crc kubenswrapper[4665]: I1205 02:15:03.768368 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415015-sh8p7" Dec 05 02:15:03 crc kubenswrapper[4665]: I1205 02:15:03.922788 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rs5nk\" (UniqueName: \"kubernetes.io/projected/38603037-5322-4862-b028-08abd6d7c8f1-kube-api-access-rs5nk\") pod \"38603037-5322-4862-b028-08abd6d7c8f1\" (UID: \"38603037-5322-4862-b028-08abd6d7c8f1\") " Dec 05 02:15:03 crc kubenswrapper[4665]: I1205 02:15:03.922867 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/38603037-5322-4862-b028-08abd6d7c8f1-config-volume\") pod \"38603037-5322-4862-b028-08abd6d7c8f1\" (UID: \"38603037-5322-4862-b028-08abd6d7c8f1\") " Dec 05 02:15:03 crc kubenswrapper[4665]: I1205 02:15:03.922930 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/38603037-5322-4862-b028-08abd6d7c8f1-secret-volume\") pod \"38603037-5322-4862-b028-08abd6d7c8f1\" (UID: \"38603037-5322-4862-b028-08abd6d7c8f1\") " Dec 05 02:15:03 crc kubenswrapper[4665]: I1205 02:15:03.924039 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/38603037-5322-4862-b028-08abd6d7c8f1-config-volume" (OuterVolumeSpecName: "config-volume") pod "38603037-5322-4862-b028-08abd6d7c8f1" (UID: "38603037-5322-4862-b028-08abd6d7c8f1"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 02:15:03 crc kubenswrapper[4665]: I1205 02:15:03.927752 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/38603037-5322-4862-b028-08abd6d7c8f1-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "38603037-5322-4862-b028-08abd6d7c8f1" (UID: "38603037-5322-4862-b028-08abd6d7c8f1"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 02:15:03 crc kubenswrapper[4665]: I1205 02:15:03.928430 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/38603037-5322-4862-b028-08abd6d7c8f1-kube-api-access-rs5nk" (OuterVolumeSpecName: "kube-api-access-rs5nk") pod "38603037-5322-4862-b028-08abd6d7c8f1" (UID: "38603037-5322-4862-b028-08abd6d7c8f1"). InnerVolumeSpecName "kube-api-access-rs5nk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:15:04 crc kubenswrapper[4665]: I1205 02:15:04.025476 4665 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/38603037-5322-4862-b028-08abd6d7c8f1-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 02:15:04 crc kubenswrapper[4665]: I1205 02:15:04.025508 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rs5nk\" (UniqueName: \"kubernetes.io/projected/38603037-5322-4862-b028-08abd6d7c8f1-kube-api-access-rs5nk\") on node \"crc\" DevicePath \"\"" Dec 05 02:15:04 crc kubenswrapper[4665]: I1205 02:15:04.025517 4665 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/38603037-5322-4862-b028-08abd6d7c8f1-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 02:15:04 crc kubenswrapper[4665]: I1205 02:15:04.282729 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415015-sh8p7" event={"ID":"38603037-5322-4862-b028-08abd6d7c8f1","Type":"ContainerDied","Data":"3b23ea265d5e8a5b8c12510ea3d3064dab73e3777778ba61da4aeb56d802428a"} Dec 05 02:15:04 crc kubenswrapper[4665]: I1205 02:15:04.283266 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3b23ea265d5e8a5b8c12510ea3d3064dab73e3777778ba61da4aeb56d802428a" Dec 05 02:15:04 crc kubenswrapper[4665]: I1205 02:15:04.282948 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415015-sh8p7" Dec 05 02:15:04 crc kubenswrapper[4665]: I1205 02:15:04.353144 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414970-dflj4"] Dec 05 02:15:04 crc kubenswrapper[4665]: I1205 02:15:04.361842 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414970-dflj4"] Dec 05 02:15:04 crc kubenswrapper[4665]: I1205 02:15:04.905467 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="00725aa1-db1b-4f33-8026-ba623cf93fca" path="/var/lib/kubelet/pods/00725aa1-db1b-4f33-8026-ba623cf93fca/volumes" Dec 05 02:15:12 crc kubenswrapper[4665]: I1205 02:15:12.893371 4665 scope.go:117] "RemoveContainer" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" Dec 05 02:15:12 crc kubenswrapper[4665]: E1205 02:15:12.894259 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:15:25 crc kubenswrapper[4665]: I1205 02:15:25.894017 4665 scope.go:117] "RemoveContainer" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" Dec 05 02:15:25 crc kubenswrapper[4665]: E1205 02:15:25.894641 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:15:37 crc kubenswrapper[4665]: I1205 02:15:37.893838 4665 scope.go:117] "RemoveContainer" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" Dec 05 02:15:37 crc kubenswrapper[4665]: E1205 02:15:37.894609 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:15:39 crc kubenswrapper[4665]: I1205 02:15:39.171971 4665 scope.go:117] "RemoveContainer" containerID="7f31dca1d516a7c511aca131b33be79d264115c9d8e1cae74cc1c63cf898bdad" Dec 05 02:15:52 crc kubenswrapper[4665]: I1205 02:15:52.894254 4665 scope.go:117] "RemoveContainer" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" Dec 05 02:15:52 crc kubenswrapper[4665]: E1205 02:15:52.896417 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:16:07 crc kubenswrapper[4665]: I1205 02:16:07.893554 4665 scope.go:117] "RemoveContainer" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" Dec 05 02:16:07 crc kubenswrapper[4665]: E1205 02:16:07.894199 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:16:19 crc kubenswrapper[4665]: I1205 02:16:19.894249 4665 scope.go:117] "RemoveContainer" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" Dec 05 02:16:19 crc kubenswrapper[4665]: E1205 02:16:19.895182 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:16:33 crc kubenswrapper[4665]: I1205 02:16:33.893399 4665 scope.go:117] "RemoveContainer" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" Dec 05 02:16:33 crc kubenswrapper[4665]: E1205 02:16:33.894133 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:16:47 crc kubenswrapper[4665]: I1205 02:16:47.894391 4665 scope.go:117] "RemoveContainer" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" Dec 05 02:16:47 crc kubenswrapper[4665]: E1205 02:16:47.895111 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:17:02 crc kubenswrapper[4665]: I1205 02:17:02.893522 4665 scope.go:117] "RemoveContainer" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" Dec 05 02:17:02 crc kubenswrapper[4665]: E1205 02:17:02.894329 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:17:17 crc kubenswrapper[4665]: I1205 02:17:17.893832 4665 scope.go:117] "RemoveContainer" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" Dec 05 02:17:17 crc kubenswrapper[4665]: E1205 02:17:17.894523 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:17:28 crc kubenswrapper[4665]: I1205 02:17:28.893513 4665 scope.go:117] "RemoveContainer" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" Dec 05 02:17:28 crc kubenswrapper[4665]: E1205 02:17:28.894392 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:17:40 crc kubenswrapper[4665]: I1205 02:17:40.893739 4665 scope.go:117] "RemoveContainer" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" Dec 05 02:17:40 crc kubenswrapper[4665]: E1205 02:17:40.894647 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:17:51 crc kubenswrapper[4665]: I1205 02:17:51.894422 4665 
scope.go:117] "RemoveContainer" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" Dec 05 02:17:51 crc kubenswrapper[4665]: E1205 02:17:51.895812 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:18:05 crc kubenswrapper[4665]: I1205 02:18:05.893971 4665 scope.go:117] "RemoveContainer" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" Dec 05 02:18:05 crc kubenswrapper[4665]: E1205 02:18:05.894748 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:18:12 crc kubenswrapper[4665]: I1205 02:18:12.834748 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hp54f"] Dec 05 02:18:12 crc kubenswrapper[4665]: E1205 02:18:12.835805 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="38603037-5322-4862-b028-08abd6d7c8f1" containerName="collect-profiles" Dec 05 02:18:12 crc kubenswrapper[4665]: I1205 02:18:12.835838 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="38603037-5322-4862-b028-08abd6d7c8f1" containerName="collect-profiles" Dec 05 02:18:12 crc kubenswrapper[4665]: I1205 02:18:12.836093 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="38603037-5322-4862-b028-08abd6d7c8f1" containerName="collect-profiles" Dec 05 02:18:12 crc kubenswrapper[4665]: I1205 02:18:12.838863 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hp54f" Dec 05 02:18:12 crc kubenswrapper[4665]: I1205 02:18:12.854995 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hp54f"] Dec 05 02:18:12 crc kubenswrapper[4665]: I1205 02:18:12.916912 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8992e79d-9fe6-49dc-b749-b3ffeaca1d9a-utilities\") pod \"redhat-operators-hp54f\" (UID: \"8992e79d-9fe6-49dc-b749-b3ffeaca1d9a\") " pod="openshift-marketplace/redhat-operators-hp54f" Dec 05 02:18:12 crc kubenswrapper[4665]: I1205 02:18:12.917538 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8992e79d-9fe6-49dc-b749-b3ffeaca1d9a-catalog-content\") pod \"redhat-operators-hp54f\" (UID: \"8992e79d-9fe6-49dc-b749-b3ffeaca1d9a\") " pod="openshift-marketplace/redhat-operators-hp54f" Dec 05 02:18:12 crc kubenswrapper[4665]: I1205 02:18:12.917840 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blvz6\" (UniqueName: \"kubernetes.io/projected/8992e79d-9fe6-49dc-b749-b3ffeaca1d9a-kube-api-access-blvz6\") pod \"redhat-operators-hp54f\" (UID: \"8992e79d-9fe6-49dc-b749-b3ffeaca1d9a\") " pod="openshift-marketplace/redhat-operators-hp54f" Dec 05 02:18:13 crc kubenswrapper[4665]: I1205 02:18:13.019809 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blvz6\" (UniqueName: \"kubernetes.io/projected/8992e79d-9fe6-49dc-b749-b3ffeaca1d9a-kube-api-access-blvz6\") pod \"redhat-operators-hp54f\" (UID: \"8992e79d-9fe6-49dc-b749-b3ffeaca1d9a\") " pod="openshift-marketplace/redhat-operators-hp54f" Dec 05 02:18:13 crc kubenswrapper[4665]: I1205 02:18:13.019927 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8992e79d-9fe6-49dc-b749-b3ffeaca1d9a-utilities\") pod \"redhat-operators-hp54f\" (UID: \"8992e79d-9fe6-49dc-b749-b3ffeaca1d9a\") " pod="openshift-marketplace/redhat-operators-hp54f" Dec 05 02:18:13 crc kubenswrapper[4665]: I1205 02:18:13.019948 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8992e79d-9fe6-49dc-b749-b3ffeaca1d9a-catalog-content\") pod \"redhat-operators-hp54f\" (UID: \"8992e79d-9fe6-49dc-b749-b3ffeaca1d9a\") " pod="openshift-marketplace/redhat-operators-hp54f" Dec 05 02:18:13 crc kubenswrapper[4665]: I1205 02:18:13.020423 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8992e79d-9fe6-49dc-b749-b3ffeaca1d9a-catalog-content\") pod \"redhat-operators-hp54f\" (UID: \"8992e79d-9fe6-49dc-b749-b3ffeaca1d9a\") " pod="openshift-marketplace/redhat-operators-hp54f" Dec 05 02:18:13 crc kubenswrapper[4665]: I1205 02:18:13.020910 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8992e79d-9fe6-49dc-b749-b3ffeaca1d9a-utilities\") pod \"redhat-operators-hp54f\" (UID: \"8992e79d-9fe6-49dc-b749-b3ffeaca1d9a\") " pod="openshift-marketplace/redhat-operators-hp54f" Dec 05 02:18:13 crc kubenswrapper[4665]: I1205 02:18:13.053372 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-blvz6\" (UniqueName: \"kubernetes.io/projected/8992e79d-9fe6-49dc-b749-b3ffeaca1d9a-kube-api-access-blvz6\") pod \"redhat-operators-hp54f\" (UID: \"8992e79d-9fe6-49dc-b749-b3ffeaca1d9a\") " pod="openshift-marketplace/redhat-operators-hp54f" Dec 05 02:18:13 crc kubenswrapper[4665]: I1205 02:18:13.167213 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hp54f" Dec 05 02:18:13 crc kubenswrapper[4665]: I1205 02:18:13.652881 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hp54f"] Dec 05 02:18:13 crc kubenswrapper[4665]: I1205 02:18:13.998140 4665 generic.go:334] "Generic (PLEG): container finished" podID="8992e79d-9fe6-49dc-b749-b3ffeaca1d9a" containerID="1f66e9ea64ed1314aeb7cf4bfa3cfb722938b04d09edc6678c0e3f12296f94e5" exitCode=0 Dec 05 02:18:13 crc kubenswrapper[4665]: I1205 02:18:13.998364 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hp54f" event={"ID":"8992e79d-9fe6-49dc-b749-b3ffeaca1d9a","Type":"ContainerDied","Data":"1f66e9ea64ed1314aeb7cf4bfa3cfb722938b04d09edc6678c0e3f12296f94e5"} Dec 05 02:18:13 crc kubenswrapper[4665]: I1205 02:18:13.998428 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hp54f" event={"ID":"8992e79d-9fe6-49dc-b749-b3ffeaca1d9a","Type":"ContainerStarted","Data":"6fb0d632ba6dcf99945a2e146a2621449f442ce8fdd7b23bb0a3a5519217fa84"} Dec 05 02:18:14 crc kubenswrapper[4665]: I1205 02:18:14.000441 4665 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 02:18:16 crc kubenswrapper[4665]: I1205 02:18:16.021580 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hp54f" event={"ID":"8992e79d-9fe6-49dc-b749-b3ffeaca1d9a","Type":"ContainerStarted","Data":"1e176b3dff520b212db425a0ffa4a9d7d2ca38bcaf1976249351d9173dac8427"} Dec 05 02:18:17 crc kubenswrapper[4665]: E1205 02:18:17.374350 4665 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8992e79d_9fe6_49dc_b749_b3ffeaca1d9a.slice/crio-1e176b3dff520b212db425a0ffa4a9d7d2ca38bcaf1976249351d9173dac8427.scope\": RecentStats: unable to find data in memory cache]" Dec 05 02:18:18 crc kubenswrapper[4665]: I1205 02:18:18.064859 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hp54f" event={"ID":"8992e79d-9fe6-49dc-b749-b3ffeaca1d9a","Type":"ContainerDied","Data":"1e176b3dff520b212db425a0ffa4a9d7d2ca38bcaf1976249351d9173dac8427"} Dec 05 02:18:18 crc kubenswrapper[4665]: I1205 02:18:18.065769 4665 generic.go:334] "Generic (PLEG): container finished" podID="8992e79d-9fe6-49dc-b749-b3ffeaca1d9a" containerID="1e176b3dff520b212db425a0ffa4a9d7d2ca38bcaf1976249351d9173dac8427" exitCode=0 Dec 05 02:18:19 crc kubenswrapper[4665]: I1205 02:18:19.893637 4665 scope.go:117] "RemoveContainer" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" Dec 05 02:18:19 crc kubenswrapper[4665]: E1205 02:18:19.894347 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:18:20 crc kubenswrapper[4665]: I1205 02:18:20.085944 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hp54f" event={"ID":"8992e79d-9fe6-49dc-b749-b3ffeaca1d9a","Type":"ContainerStarted","Data":"dcb230ce19efdd545231a804f7a9dfa3e35e085704d747f901d71d631bc43539"} Dec 05 02:18:20 crc kubenswrapper[4665]: I1205 02:18:20.114493 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hp54f" podStartSLOduration=3.632850163 podStartE2EDuration="8.114473577s" podCreationTimestamp="2025-12-05 02:18:12 +0000 UTC" firstStartedPulling="2025-12-05 02:18:14.000182453 +0000 UTC m=+4069.339574762" lastFinishedPulling="2025-12-05 02:18:18.481805877 +0000 UTC m=+4073.821198176" observedRunningTime="2025-12-05 02:18:20.107731174 +0000 UTC m=+4075.447123473" watchObservedRunningTime="2025-12-05 02:18:20.114473577 +0000 UTC m=+4075.453865876" Dec 05 02:18:23 crc kubenswrapper[4665]: I1205 02:18:23.168243 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hp54f" Dec 05 02:18:23 crc kubenswrapper[4665]: I1205 02:18:23.169725 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hp54f" Dec 05 02:18:24 crc kubenswrapper[4665]: I1205 02:18:24.219401 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hp54f" podUID="8992e79d-9fe6-49dc-b749-b3ffeaca1d9a" containerName="registry-server" probeResult="failure" output=< Dec 05 02:18:24 crc kubenswrapper[4665]: timeout: failed to connect service ":50051" within 1s Dec 05 02:18:24 crc kubenswrapper[4665]: > Dec 05 02:18:33 crc kubenswrapper[4665]: I1205 02:18:33.232434 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hp54f" Dec 05 02:18:33 crc kubenswrapper[4665]: I1205 02:18:33.290519 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hp54f" Dec 05 02:18:33 crc kubenswrapper[4665]: I1205 02:18:33.471885 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hp54f"] Dec 05 02:18:34 crc kubenswrapper[4665]: I1205 02:18:34.901038 4665 scope.go:117] "RemoveContainer" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" Dec 05 02:18:34 crc kubenswrapper[4665]: E1205 02:18:34.901382 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:18:35 crc kubenswrapper[4665]: I1205 02:18:35.216977 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hp54f" podUID="8992e79d-9fe6-49dc-b749-b3ffeaca1d9a" containerName="registry-server" containerID="cri-o://dcb230ce19efdd545231a804f7a9dfa3e35e085704d747f901d71d631bc43539" gracePeriod=2 Dec 05 02:18:36 crc kubenswrapper[4665]: I1205 02:18:36.229421 4665 generic.go:334] "Generic (PLEG): container 
finished" podID="8992e79d-9fe6-49dc-b749-b3ffeaca1d9a" containerID="dcb230ce19efdd545231a804f7a9dfa3e35e085704d747f901d71d631bc43539" exitCode=0 Dec 05 02:18:36 crc kubenswrapper[4665]: I1205 02:18:36.229464 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hp54f" event={"ID":"8992e79d-9fe6-49dc-b749-b3ffeaca1d9a","Type":"ContainerDied","Data":"dcb230ce19efdd545231a804f7a9dfa3e35e085704d747f901d71d631bc43539"} Dec 05 02:18:36 crc kubenswrapper[4665]: I1205 02:18:36.229986 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hp54f" event={"ID":"8992e79d-9fe6-49dc-b749-b3ffeaca1d9a","Type":"ContainerDied","Data":"6fb0d632ba6dcf99945a2e146a2621449f442ce8fdd7b23bb0a3a5519217fa84"} Dec 05 02:18:36 crc kubenswrapper[4665]: I1205 02:18:36.230012 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6fb0d632ba6dcf99945a2e146a2621449f442ce8fdd7b23bb0a3a5519217fa84" Dec 05 02:18:36 crc kubenswrapper[4665]: I1205 02:18:36.369474 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hp54f" Dec 05 02:18:36 crc kubenswrapper[4665]: I1205 02:18:36.460686 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8992e79d-9fe6-49dc-b749-b3ffeaca1d9a-catalog-content\") pod \"8992e79d-9fe6-49dc-b749-b3ffeaca1d9a\" (UID: \"8992e79d-9fe6-49dc-b749-b3ffeaca1d9a\") " Dec 05 02:18:36 crc kubenswrapper[4665]: I1205 02:18:36.460762 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-blvz6\" (UniqueName: \"kubernetes.io/projected/8992e79d-9fe6-49dc-b749-b3ffeaca1d9a-kube-api-access-blvz6\") pod \"8992e79d-9fe6-49dc-b749-b3ffeaca1d9a\" (UID: \"8992e79d-9fe6-49dc-b749-b3ffeaca1d9a\") " Dec 05 02:18:36 crc kubenswrapper[4665]: I1205 02:18:36.462024 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8992e79d-9fe6-49dc-b749-b3ffeaca1d9a-utilities\") pod \"8992e79d-9fe6-49dc-b749-b3ffeaca1d9a\" (UID: \"8992e79d-9fe6-49dc-b749-b3ffeaca1d9a\") " Dec 05 02:18:36 crc kubenswrapper[4665]: I1205 02:18:36.463099 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8992e79d-9fe6-49dc-b749-b3ffeaca1d9a-utilities" (OuterVolumeSpecName: "utilities") pod "8992e79d-9fe6-49dc-b749-b3ffeaca1d9a" (UID: "8992e79d-9fe6-49dc-b749-b3ffeaca1d9a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:18:36 crc kubenswrapper[4665]: I1205 02:18:36.464103 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8992e79d-9fe6-49dc-b749-b3ffeaca1d9a-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 02:18:36 crc kubenswrapper[4665]: I1205 02:18:36.505518 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8992e79d-9fe6-49dc-b749-b3ffeaca1d9a-kube-api-access-blvz6" (OuterVolumeSpecName: "kube-api-access-blvz6") pod "8992e79d-9fe6-49dc-b749-b3ffeaca1d9a" (UID: "8992e79d-9fe6-49dc-b749-b3ffeaca1d9a"). InnerVolumeSpecName "kube-api-access-blvz6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:18:36 crc kubenswrapper[4665]: I1205 02:18:36.566153 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-blvz6\" (UniqueName: \"kubernetes.io/projected/8992e79d-9fe6-49dc-b749-b3ffeaca1d9a-kube-api-access-blvz6\") on node \"crc\" DevicePath \"\"" Dec 05 02:18:36 crc kubenswrapper[4665]: I1205 02:18:36.591728 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8992e79d-9fe6-49dc-b749-b3ffeaca1d9a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8992e79d-9fe6-49dc-b749-b3ffeaca1d9a" (UID: "8992e79d-9fe6-49dc-b749-b3ffeaca1d9a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:18:36 crc kubenswrapper[4665]: I1205 02:18:36.668165 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8992e79d-9fe6-49dc-b749-b3ffeaca1d9a-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 02:18:37 crc kubenswrapper[4665]: I1205 02:18:37.240066 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hp54f" Dec 05 02:18:37 crc kubenswrapper[4665]: I1205 02:18:37.263079 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hp54f"] Dec 05 02:18:37 crc kubenswrapper[4665]: I1205 02:18:37.272953 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hp54f"] Dec 05 02:18:38 crc kubenswrapper[4665]: I1205 02:18:38.907076 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8992e79d-9fe6-49dc-b749-b3ffeaca1d9a" path="/var/lib/kubelet/pods/8992e79d-9fe6-49dc-b749-b3ffeaca1d9a/volumes" Dec 05 02:18:45 crc kubenswrapper[4665]: I1205 02:18:45.893204 4665 scope.go:117] "RemoveContainer" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" Dec 05 02:18:45 crc kubenswrapper[4665]: E1205 02:18:45.894763 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:19:00 crc kubenswrapper[4665]: I1205 02:19:00.895327 4665 scope.go:117] "RemoveContainer" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" Dec 05 02:19:00 crc kubenswrapper[4665]: E1205 02:19:00.896019 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:19:13 crc kubenswrapper[4665]: I1205 02:19:13.893655 4665 scope.go:117] "RemoveContainer" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" Dec 05 02:19:13 crc kubenswrapper[4665]: E1205 02:19:13.894435 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:19:27 crc kubenswrapper[4665]: I1205 02:19:27.894219 4665 scope.go:117] "RemoveContainer" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" Dec 05 02:19:28 crc kubenswrapper[4665]: I1205 02:19:28.696270 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerStarted","Data":"9a7f31c5be7dcf08fe97e861724eaf43068f45a09a1ef09ab8e767cd3a279980"} Dec 05 02:21:44 crc kubenswrapper[4665]: I1205 02:21:44.922400 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 02:21:44 crc kubenswrapper[4665]: I1205 02:21:44.923026 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 02:22:14 crc kubenswrapper[4665]: I1205 02:22:14.922492 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 02:22:14 crc kubenswrapper[4665]: I1205 02:22:14.923485 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 02:22:44 crc kubenswrapper[4665]: I1205 02:22:44.922542 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 02:22:44 crc kubenswrapper[4665]: I1205 02:22:44.923233 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 02:22:44 crc kubenswrapper[4665]: I1205 02:22:44.923289 4665 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 02:22:44 crc kubenswrapper[4665]: I1205 02:22:44.924139 4665 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9a7f31c5be7dcf08fe97e861724eaf43068f45a09a1ef09ab8e767cd3a279980"} 
pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 02:22:44 crc kubenswrapper[4665]: I1205 02:22:44.924193 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" containerID="cri-o://9a7f31c5be7dcf08fe97e861724eaf43068f45a09a1ef09ab8e767cd3a279980" gracePeriod=600 Dec 05 02:22:45 crc kubenswrapper[4665]: I1205 02:22:45.602044 4665 generic.go:334] "Generic (PLEG): container finished" podID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerID="9a7f31c5be7dcf08fe97e861724eaf43068f45a09a1ef09ab8e767cd3a279980" exitCode=0 Dec 05 02:22:45 crc kubenswrapper[4665]: I1205 02:22:45.602135 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerDied","Data":"9a7f31c5be7dcf08fe97e861724eaf43068f45a09a1ef09ab8e767cd3a279980"} Dec 05 02:22:45 crc kubenswrapper[4665]: I1205 02:22:45.602439 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerStarted","Data":"2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06"} Dec 05 02:22:45 crc kubenswrapper[4665]: I1205 02:22:45.602466 4665 scope.go:117] "RemoveContainer" containerID="35fb5d8b8cbaacea491f173cb9af86a20e1aa787416c29091e3b70432e4334a6" Dec 05 02:23:33 crc kubenswrapper[4665]: I1205 02:23:33.601835 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kwvmj"] Dec 05 02:23:33 crc kubenswrapper[4665]: E1205 02:23:33.609272 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8992e79d-9fe6-49dc-b749-b3ffeaca1d9a" containerName="extract-utilities" Dec 05 02:23:33 crc kubenswrapper[4665]: I1205 02:23:33.609331 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="8992e79d-9fe6-49dc-b749-b3ffeaca1d9a" containerName="extract-utilities" Dec 05 02:23:33 crc kubenswrapper[4665]: E1205 02:23:33.609386 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8992e79d-9fe6-49dc-b749-b3ffeaca1d9a" containerName="registry-server" Dec 05 02:23:33 crc kubenswrapper[4665]: I1205 02:23:33.609398 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="8992e79d-9fe6-49dc-b749-b3ffeaca1d9a" containerName="registry-server" Dec 05 02:23:33 crc kubenswrapper[4665]: E1205 02:23:33.609434 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8992e79d-9fe6-49dc-b749-b3ffeaca1d9a" containerName="extract-content" Dec 05 02:23:33 crc kubenswrapper[4665]: I1205 02:23:33.609443 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="8992e79d-9fe6-49dc-b749-b3ffeaca1d9a" containerName="extract-content" Dec 05 02:23:33 crc kubenswrapper[4665]: I1205 02:23:33.610200 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="8992e79d-9fe6-49dc-b749-b3ffeaca1d9a" containerName="registry-server" Dec 05 02:23:33 crc kubenswrapper[4665]: I1205 02:23:33.622759 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kwvmj" Dec 05 02:23:33 crc kubenswrapper[4665]: I1205 02:23:33.660088 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kwvmj"] Dec 05 02:23:33 crc kubenswrapper[4665]: I1205 02:23:33.729447 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/491848a0-bc17-492c-9be1-6fec08c2448f-utilities\") pod \"redhat-marketplace-kwvmj\" (UID: \"491848a0-bc17-492c-9be1-6fec08c2448f\") " pod="openshift-marketplace/redhat-marketplace-kwvmj" Dec 05 02:23:33 crc kubenswrapper[4665]: I1205 02:23:33.729541 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/491848a0-bc17-492c-9be1-6fec08c2448f-catalog-content\") pod \"redhat-marketplace-kwvmj\" (UID: \"491848a0-bc17-492c-9be1-6fec08c2448f\") " pod="openshift-marketplace/redhat-marketplace-kwvmj" Dec 05 02:23:33 crc kubenswrapper[4665]: I1205 02:23:33.729602 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tg9dj\" (UniqueName: \"kubernetes.io/projected/491848a0-bc17-492c-9be1-6fec08c2448f-kube-api-access-tg9dj\") pod \"redhat-marketplace-kwvmj\" (UID: \"491848a0-bc17-492c-9be1-6fec08c2448f\") " pod="openshift-marketplace/redhat-marketplace-kwvmj" Dec 05 02:23:33 crc kubenswrapper[4665]: I1205 02:23:33.831879 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tg9dj\" (UniqueName: \"kubernetes.io/projected/491848a0-bc17-492c-9be1-6fec08c2448f-kube-api-access-tg9dj\") pod \"redhat-marketplace-kwvmj\" (UID: \"491848a0-bc17-492c-9be1-6fec08c2448f\") " pod="openshift-marketplace/redhat-marketplace-kwvmj" Dec 05 02:23:33 crc kubenswrapper[4665]: I1205 02:23:33.832130 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/491848a0-bc17-492c-9be1-6fec08c2448f-utilities\") pod \"redhat-marketplace-kwvmj\" (UID: \"491848a0-bc17-492c-9be1-6fec08c2448f\") " pod="openshift-marketplace/redhat-marketplace-kwvmj" Dec 05 02:23:33 crc kubenswrapper[4665]: I1205 02:23:33.832184 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/491848a0-bc17-492c-9be1-6fec08c2448f-catalog-content\") pod \"redhat-marketplace-kwvmj\" (UID: \"491848a0-bc17-492c-9be1-6fec08c2448f\") " pod="openshift-marketplace/redhat-marketplace-kwvmj" Dec 05 02:23:33 crc kubenswrapper[4665]: I1205 02:23:33.832655 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/491848a0-bc17-492c-9be1-6fec08c2448f-utilities\") pod \"redhat-marketplace-kwvmj\" (UID: \"491848a0-bc17-492c-9be1-6fec08c2448f\") " pod="openshift-marketplace/redhat-marketplace-kwvmj" Dec 05 02:23:33 crc kubenswrapper[4665]: I1205 02:23:33.832818 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/491848a0-bc17-492c-9be1-6fec08c2448f-catalog-content\") pod \"redhat-marketplace-kwvmj\" (UID: \"491848a0-bc17-492c-9be1-6fec08c2448f\") " pod="openshift-marketplace/redhat-marketplace-kwvmj" Dec 05 02:23:33 crc kubenswrapper[4665]: I1205 02:23:33.851211 4665 operation_generator.go:637] "MountVolume.SetUp 
Dec 05 02:23:33 crc kubenswrapper[4665]: I1205 02:23:33.975741 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kwvmj"
Dec 05 02:23:34 crc kubenswrapper[4665]: I1205 02:23:34.540540 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kwvmj"]
Dec 05 02:23:35 crc kubenswrapper[4665]: I1205 02:23:35.084451 4665 generic.go:334] "Generic (PLEG): container finished" podID="491848a0-bc17-492c-9be1-6fec08c2448f" containerID="4453a1b88c94802dcf4a1e25b7d0a7dd914b01bc7bf1cdb552434457176b5b9b" exitCode=0
Dec 05 02:23:35 crc kubenswrapper[4665]: I1205 02:23:35.084504 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kwvmj" event={"ID":"491848a0-bc17-492c-9be1-6fec08c2448f","Type":"ContainerDied","Data":"4453a1b88c94802dcf4a1e25b7d0a7dd914b01bc7bf1cdb552434457176b5b9b"}
Dec 05 02:23:35 crc kubenswrapper[4665]: I1205 02:23:35.084743 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kwvmj" event={"ID":"491848a0-bc17-492c-9be1-6fec08c2448f","Type":"ContainerStarted","Data":"798111f60165ec627c6724a0e240e73447d87a895812a21a7871d33e7fac11a2"}
Dec 05 02:23:35 crc kubenswrapper[4665]: I1205 02:23:35.086418 4665 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 02:23:36 crc kubenswrapper[4665]: I1205 02:23:36.094136 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kwvmj" event={"ID":"491848a0-bc17-492c-9be1-6fec08c2448f","Type":"ContainerStarted","Data":"4aa8fbb68ac684918fb073a0aa3283727d0c8994c6369d3c297dd683c499421b"}
Dec 05 02:23:37 crc kubenswrapper[4665]: I1205 02:23:37.104132 4665 generic.go:334] "Generic (PLEG): container finished" podID="491848a0-bc17-492c-9be1-6fec08c2448f" containerID="4aa8fbb68ac684918fb073a0aa3283727d0c8994c6369d3c297dd683c499421b" exitCode=0
Dec 05 02:23:37 crc kubenswrapper[4665]: I1205 02:23:37.104225 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kwvmj" event={"ID":"491848a0-bc17-492c-9be1-6fec08c2448f","Type":"ContainerDied","Data":"4aa8fbb68ac684918fb073a0aa3283727d0c8994c6369d3c297dd683c499421b"}
Dec 05 02:23:38 crc kubenswrapper[4665]: I1205 02:23:38.115286 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kwvmj" event={"ID":"491848a0-bc17-492c-9be1-6fec08c2448f","Type":"ContainerStarted","Data":"85f7ff4455f9facdf75d1e58f48012c17691f6bc12e87f81dd99589543b7f07d"}
Dec 05 02:23:38 crc kubenswrapper[4665]: I1205 02:23:38.139312 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kwvmj" podStartSLOduration=2.722245844 podStartE2EDuration="5.139278435s" podCreationTimestamp="2025-12-05 02:23:33 +0000 UTC" firstStartedPulling="2025-12-05 02:23:35.085903249 +0000 UTC m=+4390.425295548" lastFinishedPulling="2025-12-05 02:23:37.50293584 +0000 UTC m=+4392.842328139" observedRunningTime="2025-12-05 02:23:38.139245674 +0000 UTC m=+4393.478637973" watchObservedRunningTime="2025-12-05 02:23:38.139278435 +0000 UTC m=+4393.478670734"
Dec 05 02:23:40 crc kubenswrapper[4665]: I1205 02:23:40.984176 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-9pk2n"]
Dec 05 02:23:40 crc kubenswrapper[4665]: I1205 02:23:40.986785 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9pk2n"
Dec 05 02:23:41 crc kubenswrapper[4665]: I1205 02:23:41.015820 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9pk2n"]
Dec 05 02:23:41 crc kubenswrapper[4665]: I1205 02:23:41.082830 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db07d28d-034a-4f2f-a4aa-0d58cef39123-catalog-content\") pod \"community-operators-9pk2n\" (UID: \"db07d28d-034a-4f2f-a4aa-0d58cef39123\") " pod="openshift-marketplace/community-operators-9pk2n"
Dec 05 02:23:41 crc kubenswrapper[4665]: I1205 02:23:41.082898 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z28h5\" (UniqueName: \"kubernetes.io/projected/db07d28d-034a-4f2f-a4aa-0d58cef39123-kube-api-access-z28h5\") pod \"community-operators-9pk2n\" (UID: \"db07d28d-034a-4f2f-a4aa-0d58cef39123\") " pod="openshift-marketplace/community-operators-9pk2n"
Dec 05 02:23:41 crc kubenswrapper[4665]: I1205 02:23:41.083108 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db07d28d-034a-4f2f-a4aa-0d58cef39123-utilities\") pod \"community-operators-9pk2n\" (UID: \"db07d28d-034a-4f2f-a4aa-0d58cef39123\") " pod="openshift-marketplace/community-operators-9pk2n"
Dec 05 02:23:41 crc kubenswrapper[4665]: I1205 02:23:41.185030 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db07d28d-034a-4f2f-a4aa-0d58cef39123-catalog-content\") pod \"community-operators-9pk2n\" (UID: \"db07d28d-034a-4f2f-a4aa-0d58cef39123\") " pod="openshift-marketplace/community-operators-9pk2n"
Dec 05 02:23:41 crc kubenswrapper[4665]: I1205 02:23:41.185342 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z28h5\" (UniqueName: \"kubernetes.io/projected/db07d28d-034a-4f2f-a4aa-0d58cef39123-kube-api-access-z28h5\") pod \"community-operators-9pk2n\" (UID: \"db07d28d-034a-4f2f-a4aa-0d58cef39123\") " pod="openshift-marketplace/community-operators-9pk2n"
Dec 05 02:23:41 crc kubenswrapper[4665]: I1205 02:23:41.185543 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db07d28d-034a-4f2f-a4aa-0d58cef39123-utilities\") pod \"community-operators-9pk2n\" (UID: \"db07d28d-034a-4f2f-a4aa-0d58cef39123\") " pod="openshift-marketplace/community-operators-9pk2n"
Dec 05 02:23:41 crc kubenswrapper[4665]: I1205 02:23:41.185603 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db07d28d-034a-4f2f-a4aa-0d58cef39123-catalog-content\") pod \"community-operators-9pk2n\" (UID: \"db07d28d-034a-4f2f-a4aa-0d58cef39123\") " pod="openshift-marketplace/community-operators-9pk2n"
Dec 05 02:23:41 crc kubenswrapper[4665]: I1205 02:23:41.186026 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db07d28d-034a-4f2f-a4aa-0d58cef39123-utilities\") pod \"community-operators-9pk2n\" (UID: \"db07d28d-034a-4f2f-a4aa-0d58cef39123\") " pod="openshift-marketplace/community-operators-9pk2n"
Dec 05 02:23:41 crc kubenswrapper[4665]: I1205 02:23:41.219337 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z28h5\" (UniqueName: \"kubernetes.io/projected/db07d28d-034a-4f2f-a4aa-0d58cef39123-kube-api-access-z28h5\") pod \"community-operators-9pk2n\" (UID: \"db07d28d-034a-4f2f-a4aa-0d58cef39123\") " pod="openshift-marketplace/community-operators-9pk2n"
Dec 05 02:23:41 crc kubenswrapper[4665]: I1205 02:23:41.307136 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9pk2n"
Dec 05 02:23:41 crc kubenswrapper[4665]: I1205 02:23:41.837570 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-9pk2n"]
Dec 05 02:23:41 crc kubenswrapper[4665]: W1205 02:23:41.844686 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddb07d28d_034a_4f2f_a4aa_0d58cef39123.slice/crio-db07639de3ec359d6e33938cb0b3e1bbc1e37476776dff88c6745cf38c2b91a3 WatchSource:0}: Error finding container db07639de3ec359d6e33938cb0b3e1bbc1e37476776dff88c6745cf38c2b91a3: Status 404 returned error can't find the container with id db07639de3ec359d6e33938cb0b3e1bbc1e37476776dff88c6745cf38c2b91a3
Dec 05 02:23:42 crc kubenswrapper[4665]: I1205 02:23:42.152439 4665 generic.go:334] "Generic (PLEG): container finished" podID="db07d28d-034a-4f2f-a4aa-0d58cef39123" containerID="a8bebc8c9bd8f9ebc0a9e200132190f4498d3a718f3deefec3eac7738419269b" exitCode=0
Dec 05 02:23:42 crc kubenswrapper[4665]: I1205 02:23:42.152521 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9pk2n" event={"ID":"db07d28d-034a-4f2f-a4aa-0d58cef39123","Type":"ContainerDied","Data":"a8bebc8c9bd8f9ebc0a9e200132190f4498d3a718f3deefec3eac7738419269b"}
Dec 05 02:23:42 crc kubenswrapper[4665]: I1205 02:23:42.152782 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9pk2n" event={"ID":"db07d28d-034a-4f2f-a4aa-0d58cef39123","Type":"ContainerStarted","Data":"db07639de3ec359d6e33938cb0b3e1bbc1e37476776dff88c6745cf38c2b91a3"}
Dec 05 02:23:43 crc kubenswrapper[4665]: I1205 02:23:43.162239 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9pk2n" event={"ID":"db07d28d-034a-4f2f-a4aa-0d58cef39123","Type":"ContainerStarted","Data":"97e442e2b9ba5f104c7a7b08a6df8a8292a098e4a619a4d40f61e2fabcb29ea3"}
Dec 05 02:23:43 crc kubenswrapper[4665]: I1205 02:23:43.976800 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kwvmj"
Dec 05 02:23:43 crc kubenswrapper[4665]: I1205 02:23:43.977119 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kwvmj"
Dec 05 02:23:44 crc kubenswrapper[4665]: I1205 02:23:44.174912 4665 generic.go:334] "Generic (PLEG): container finished" podID="db07d28d-034a-4f2f-a4aa-0d58cef39123" containerID="97e442e2b9ba5f104c7a7b08a6df8a8292a098e4a619a4d40f61e2fabcb29ea3" exitCode=0
Dec 05 02:23:44 crc kubenswrapper[4665]: I1205 02:23:44.174957 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9pk2n" event={"ID":"db07d28d-034a-4f2f-a4aa-0d58cef39123","Type":"ContainerDied","Data":"97e442e2b9ba5f104c7a7b08a6df8a8292a098e4a619a4d40f61e2fabcb29ea3"}
Dec 05 02:23:44 crc kubenswrapper[4665]: I1205 02:23:44.550548 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kwvmj"
Dec 05 02:23:44 crc kubenswrapper[4665]: I1205 02:23:44.620351 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kwvmj"
Dec 05 02:23:45 crc kubenswrapper[4665]: I1205 02:23:45.184429 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9pk2n" event={"ID":"db07d28d-034a-4f2f-a4aa-0d58cef39123","Type":"ContainerStarted","Data":"293b4a4b651e142c06a7a98f67be27f130ec05e103de615e9f44b57ebaedccd6"}
Dec 05 02:23:45 crc kubenswrapper[4665]: I1205 02:23:45.220265 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-9pk2n" podStartSLOduration=2.783109599 podStartE2EDuration="5.220215947s" podCreationTimestamp="2025-12-05 02:23:40 +0000 UTC" firstStartedPulling="2025-12-05 02:23:42.154016021 +0000 UTC m=+4397.493408320" lastFinishedPulling="2025-12-05 02:23:44.591122369 +0000 UTC m=+4399.930514668" observedRunningTime="2025-12-05 02:23:45.21163042 +0000 UTC m=+4400.551022719" watchObservedRunningTime="2025-12-05 02:23:45.220215947 +0000 UTC m=+4400.559608266"
Dec 05 02:23:46 crc kubenswrapper[4665]: I1205 02:23:46.966658 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kwvmj"]
Dec 05 02:23:46 crc kubenswrapper[4665]: I1205 02:23:46.967157 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kwvmj" podUID="491848a0-bc17-492c-9be1-6fec08c2448f" containerName="registry-server" containerID="cri-o://85f7ff4455f9facdf75d1e58f48012c17691f6bc12e87f81dd99589543b7f07d" gracePeriod=2
Dec 05 02:23:47 crc kubenswrapper[4665]: I1205 02:23:47.231029 4665 generic.go:334] "Generic (PLEG): container finished" podID="491848a0-bc17-492c-9be1-6fec08c2448f" containerID="85f7ff4455f9facdf75d1e58f48012c17691f6bc12e87f81dd99589543b7f07d" exitCode=0
Dec 05 02:23:47 crc kubenswrapper[4665]: I1205 02:23:47.231396 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kwvmj" event={"ID":"491848a0-bc17-492c-9be1-6fec08c2448f","Type":"ContainerDied","Data":"85f7ff4455f9facdf75d1e58f48012c17691f6bc12e87f81dd99589543b7f07d"}
Dec 05 02:23:47 crc kubenswrapper[4665]: I1205 02:23:47.453194 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kwvmj"
Dec 05 02:23:47 crc kubenswrapper[4665]: I1205 02:23:47.606876 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/491848a0-bc17-492c-9be1-6fec08c2448f-utilities\") pod \"491848a0-bc17-492c-9be1-6fec08c2448f\" (UID: \"491848a0-bc17-492c-9be1-6fec08c2448f\") "
Dec 05 02:23:47 crc kubenswrapper[4665]: I1205 02:23:47.607579 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/491848a0-bc17-492c-9be1-6fec08c2448f-utilities" (OuterVolumeSpecName: "utilities") pod "491848a0-bc17-492c-9be1-6fec08c2448f" (UID: "491848a0-bc17-492c-9be1-6fec08c2448f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:23:47 crc kubenswrapper[4665]: I1205 02:23:47.607691 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/491848a0-bc17-492c-9be1-6fec08c2448f-catalog-content\") pod \"491848a0-bc17-492c-9be1-6fec08c2448f\" (UID: \"491848a0-bc17-492c-9be1-6fec08c2448f\") " Dec 05 02:23:47 crc kubenswrapper[4665]: I1205 02:23:47.611799 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tg9dj\" (UniqueName: \"kubernetes.io/projected/491848a0-bc17-492c-9be1-6fec08c2448f-kube-api-access-tg9dj\") pod \"491848a0-bc17-492c-9be1-6fec08c2448f\" (UID: \"491848a0-bc17-492c-9be1-6fec08c2448f\") " Dec 05 02:23:47 crc kubenswrapper[4665]: I1205 02:23:47.612728 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/491848a0-bc17-492c-9be1-6fec08c2448f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 02:23:47 crc kubenswrapper[4665]: I1205 02:23:47.618875 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/491848a0-bc17-492c-9be1-6fec08c2448f-kube-api-access-tg9dj" (OuterVolumeSpecName: "kube-api-access-tg9dj") pod "491848a0-bc17-492c-9be1-6fec08c2448f" (UID: "491848a0-bc17-492c-9be1-6fec08c2448f"). InnerVolumeSpecName "kube-api-access-tg9dj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:23:47 crc kubenswrapper[4665]: I1205 02:23:47.633872 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/491848a0-bc17-492c-9be1-6fec08c2448f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "491848a0-bc17-492c-9be1-6fec08c2448f" (UID: "491848a0-bc17-492c-9be1-6fec08c2448f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:23:47 crc kubenswrapper[4665]: I1205 02:23:47.714889 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/491848a0-bc17-492c-9be1-6fec08c2448f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 02:23:47 crc kubenswrapper[4665]: I1205 02:23:47.714922 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tg9dj\" (UniqueName: \"kubernetes.io/projected/491848a0-bc17-492c-9be1-6fec08c2448f-kube-api-access-tg9dj\") on node \"crc\" DevicePath \"\"" Dec 05 02:23:48 crc kubenswrapper[4665]: I1205 02:23:48.239907 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kwvmj" event={"ID":"491848a0-bc17-492c-9be1-6fec08c2448f","Type":"ContainerDied","Data":"798111f60165ec627c6724a0e240e73447d87a895812a21a7871d33e7fac11a2"} Dec 05 02:23:48 crc kubenswrapper[4665]: I1205 02:23:48.239963 4665 scope.go:117] "RemoveContainer" containerID="85f7ff4455f9facdf75d1e58f48012c17691f6bc12e87f81dd99589543b7f07d" Dec 05 02:23:48 crc kubenswrapper[4665]: I1205 02:23:48.240080 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kwvmj" Dec 05 02:23:48 crc kubenswrapper[4665]: I1205 02:23:48.260930 4665 scope.go:117] "RemoveContainer" containerID="4aa8fbb68ac684918fb073a0aa3283727d0c8994c6369d3c297dd683c499421b" Dec 05 02:23:48 crc kubenswrapper[4665]: I1205 02:23:48.283099 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kwvmj"] Dec 05 02:23:48 crc kubenswrapper[4665]: I1205 02:23:48.286115 4665 scope.go:117] "RemoveContainer" containerID="4453a1b88c94802dcf4a1e25b7d0a7dd914b01bc7bf1cdb552434457176b5b9b" Dec 05 02:23:48 crc kubenswrapper[4665]: I1205 02:23:48.295220 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kwvmj"] Dec 05 02:23:48 crc kubenswrapper[4665]: I1205 02:23:48.904422 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="491848a0-bc17-492c-9be1-6fec08c2448f" path="/var/lib/kubelet/pods/491848a0-bc17-492c-9be1-6fec08c2448f/volumes" Dec 05 02:23:51 crc kubenswrapper[4665]: I1205 02:23:51.307671 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-9pk2n" Dec 05 02:23:51 crc kubenswrapper[4665]: I1205 02:23:51.307932 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-9pk2n" Dec 05 02:23:51 crc kubenswrapper[4665]: I1205 02:23:51.352401 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-9pk2n" Dec 05 02:23:52 crc kubenswrapper[4665]: I1205 02:23:52.364786 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-9pk2n" Dec 05 02:23:52 crc kubenswrapper[4665]: I1205 02:23:52.415696 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9pk2n"] Dec 05 02:23:54 crc kubenswrapper[4665]: I1205 02:23:54.320148 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-9pk2n" podUID="db07d28d-034a-4f2f-a4aa-0d58cef39123" containerName="registry-server" containerID="cri-o://293b4a4b651e142c06a7a98f67be27f130ec05e103de615e9f44b57ebaedccd6" gracePeriod=2 Dec 05 02:23:55 crc kubenswrapper[4665]: I1205 02:23:55.202669 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-9pk2n" Dec 05 02:23:55 crc kubenswrapper[4665]: I1205 02:23:55.329757 4665 generic.go:334] "Generic (PLEG): container finished" podID="db07d28d-034a-4f2f-a4aa-0d58cef39123" containerID="293b4a4b651e142c06a7a98f67be27f130ec05e103de615e9f44b57ebaedccd6" exitCode=0 Dec 05 02:23:55 crc kubenswrapper[4665]: I1205 02:23:55.329810 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9pk2n" event={"ID":"db07d28d-034a-4f2f-a4aa-0d58cef39123","Type":"ContainerDied","Data":"293b4a4b651e142c06a7a98f67be27f130ec05e103de615e9f44b57ebaedccd6"} Dec 05 02:23:55 crc kubenswrapper[4665]: I1205 02:23:55.329816 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-9pk2n" Dec 05 02:23:55 crc kubenswrapper[4665]: I1205 02:23:55.329839 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-9pk2n" event={"ID":"db07d28d-034a-4f2f-a4aa-0d58cef39123","Type":"ContainerDied","Data":"db07639de3ec359d6e33938cb0b3e1bbc1e37476776dff88c6745cf38c2b91a3"} Dec 05 02:23:55 crc kubenswrapper[4665]: I1205 02:23:55.329856 4665 scope.go:117] "RemoveContainer" containerID="293b4a4b651e142c06a7a98f67be27f130ec05e103de615e9f44b57ebaedccd6" Dec 05 02:23:55 crc kubenswrapper[4665]: I1205 02:23:55.357260 4665 scope.go:117] "RemoveContainer" containerID="97e442e2b9ba5f104c7a7b08a6df8a8292a098e4a619a4d40f61e2fabcb29ea3" Dec 05 02:23:55 crc kubenswrapper[4665]: I1205 02:23:55.383184 4665 scope.go:117] "RemoveContainer" containerID="a8bebc8c9bd8f9ebc0a9e200132190f4498d3a718f3deefec3eac7738419269b" Dec 05 02:23:55 crc kubenswrapper[4665]: I1205 02:23:55.386948 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db07d28d-034a-4f2f-a4aa-0d58cef39123-catalog-content\") pod \"db07d28d-034a-4f2f-a4aa-0d58cef39123\" (UID: \"db07d28d-034a-4f2f-a4aa-0d58cef39123\") " Dec 05 02:23:55 crc kubenswrapper[4665]: I1205 02:23:55.387122 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db07d28d-034a-4f2f-a4aa-0d58cef39123-utilities\") pod \"db07d28d-034a-4f2f-a4aa-0d58cef39123\" (UID: \"db07d28d-034a-4f2f-a4aa-0d58cef39123\") " Dec 05 02:23:55 crc kubenswrapper[4665]: I1205 02:23:55.387170 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z28h5\" (UniqueName: \"kubernetes.io/projected/db07d28d-034a-4f2f-a4aa-0d58cef39123-kube-api-access-z28h5\") pod \"db07d28d-034a-4f2f-a4aa-0d58cef39123\" (UID: \"db07d28d-034a-4f2f-a4aa-0d58cef39123\") " Dec 05 02:23:55 crc kubenswrapper[4665]: I1205 02:23:55.388749 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db07d28d-034a-4f2f-a4aa-0d58cef39123-utilities" (OuterVolumeSpecName: "utilities") pod "db07d28d-034a-4f2f-a4aa-0d58cef39123" (UID: "db07d28d-034a-4f2f-a4aa-0d58cef39123"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:23:55 crc kubenswrapper[4665]: I1205 02:23:55.401948 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db07d28d-034a-4f2f-a4aa-0d58cef39123-kube-api-access-z28h5" (OuterVolumeSpecName: "kube-api-access-z28h5") pod "db07d28d-034a-4f2f-a4aa-0d58cef39123" (UID: "db07d28d-034a-4f2f-a4aa-0d58cef39123"). InnerVolumeSpecName "kube-api-access-z28h5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:23:55 crc kubenswrapper[4665]: I1205 02:23:55.434237 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/db07d28d-034a-4f2f-a4aa-0d58cef39123-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "db07d28d-034a-4f2f-a4aa-0d58cef39123" (UID: "db07d28d-034a-4f2f-a4aa-0d58cef39123"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:23:55 crc kubenswrapper[4665]: I1205 02:23:55.453341 4665 scope.go:117] "RemoveContainer" containerID="293b4a4b651e142c06a7a98f67be27f130ec05e103de615e9f44b57ebaedccd6" Dec 05 02:23:55 crc kubenswrapper[4665]: E1205 02:23:55.453845 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"293b4a4b651e142c06a7a98f67be27f130ec05e103de615e9f44b57ebaedccd6\": container with ID starting with 293b4a4b651e142c06a7a98f67be27f130ec05e103de615e9f44b57ebaedccd6 not found: ID does not exist" containerID="293b4a4b651e142c06a7a98f67be27f130ec05e103de615e9f44b57ebaedccd6" Dec 05 02:23:55 crc kubenswrapper[4665]: I1205 02:23:55.453913 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"293b4a4b651e142c06a7a98f67be27f130ec05e103de615e9f44b57ebaedccd6"} err="failed to get container status \"293b4a4b651e142c06a7a98f67be27f130ec05e103de615e9f44b57ebaedccd6\": rpc error: code = NotFound desc = could not find container \"293b4a4b651e142c06a7a98f67be27f130ec05e103de615e9f44b57ebaedccd6\": container with ID starting with 293b4a4b651e142c06a7a98f67be27f130ec05e103de615e9f44b57ebaedccd6 not found: ID does not exist" Dec 05 02:23:55 crc kubenswrapper[4665]: I1205 02:23:55.453950 4665 scope.go:117] "RemoveContainer" containerID="97e442e2b9ba5f104c7a7b08a6df8a8292a098e4a619a4d40f61e2fabcb29ea3" Dec 05 02:23:55 crc kubenswrapper[4665]: E1205 02:23:55.454387 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97e442e2b9ba5f104c7a7b08a6df8a8292a098e4a619a4d40f61e2fabcb29ea3\": container with ID starting with 97e442e2b9ba5f104c7a7b08a6df8a8292a098e4a619a4d40f61e2fabcb29ea3 not found: ID does not exist" containerID="97e442e2b9ba5f104c7a7b08a6df8a8292a098e4a619a4d40f61e2fabcb29ea3" Dec 05 02:23:55 crc kubenswrapper[4665]: I1205 02:23:55.454436 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97e442e2b9ba5f104c7a7b08a6df8a8292a098e4a619a4d40f61e2fabcb29ea3"} err="failed to get container status \"97e442e2b9ba5f104c7a7b08a6df8a8292a098e4a619a4d40f61e2fabcb29ea3\": rpc error: code = NotFound desc = could not find container \"97e442e2b9ba5f104c7a7b08a6df8a8292a098e4a619a4d40f61e2fabcb29ea3\": container with ID starting with 97e442e2b9ba5f104c7a7b08a6df8a8292a098e4a619a4d40f61e2fabcb29ea3 not found: ID does not exist" Dec 05 02:23:55 crc kubenswrapper[4665]: I1205 02:23:55.454465 4665 scope.go:117] "RemoveContainer" containerID="a8bebc8c9bd8f9ebc0a9e200132190f4498d3a718f3deefec3eac7738419269b" Dec 05 02:23:55 crc kubenswrapper[4665]: E1205 02:23:55.455000 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8bebc8c9bd8f9ebc0a9e200132190f4498d3a718f3deefec3eac7738419269b\": container with ID starting with a8bebc8c9bd8f9ebc0a9e200132190f4498d3a718f3deefec3eac7738419269b not found: ID does not exist" containerID="a8bebc8c9bd8f9ebc0a9e200132190f4498d3a718f3deefec3eac7738419269b" Dec 05 02:23:55 crc kubenswrapper[4665]: I1205 02:23:55.455122 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8bebc8c9bd8f9ebc0a9e200132190f4498d3a718f3deefec3eac7738419269b"} err="failed to get container status \"a8bebc8c9bd8f9ebc0a9e200132190f4498d3a718f3deefec3eac7738419269b\": rpc error: code = NotFound desc = could not 
find container \"a8bebc8c9bd8f9ebc0a9e200132190f4498d3a718f3deefec3eac7738419269b\": container with ID starting with a8bebc8c9bd8f9ebc0a9e200132190f4498d3a718f3deefec3eac7738419269b not found: ID does not exist" Dec 05 02:23:55 crc kubenswrapper[4665]: I1205 02:23:55.489222 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/db07d28d-034a-4f2f-a4aa-0d58cef39123-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 02:23:55 crc kubenswrapper[4665]: I1205 02:23:55.489251 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/db07d28d-034a-4f2f-a4aa-0d58cef39123-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 02:23:55 crc kubenswrapper[4665]: I1205 02:23:55.489264 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z28h5\" (UniqueName: \"kubernetes.io/projected/db07d28d-034a-4f2f-a4aa-0d58cef39123-kube-api-access-z28h5\") on node \"crc\" DevicePath \"\"" Dec 05 02:23:55 crc kubenswrapper[4665]: I1205 02:23:55.667396 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-9pk2n"] Dec 05 02:23:55 crc kubenswrapper[4665]: I1205 02:23:55.679979 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-9pk2n"] Dec 05 02:23:56 crc kubenswrapper[4665]: I1205 02:23:56.903246 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="db07d28d-034a-4f2f-a4aa-0d58cef39123" path="/var/lib/kubelet/pods/db07d28d-034a-4f2f-a4aa-0d58cef39123/volumes" Dec 05 02:24:02 crc kubenswrapper[4665]: I1205 02:24:02.411077 4665 generic.go:334] "Generic (PLEG): container finished" podID="a3dcea46-0cf1-441d-84ba-0b327c396844" containerID="e000742542e570cc0d15119403498d4ede5b0c740ca856a4e9675b72d86ca4d4" exitCode=0 Dec 05 02:24:02 crc kubenswrapper[4665]: I1205 02:24:02.411151 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"a3dcea46-0cf1-441d-84ba-0b327c396844","Type":"ContainerDied","Data":"e000742542e570cc0d15119403498d4ede5b0c740ca856a4e9675b72d86ca4d4"} Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.736414 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-m9mx9"] Dec 05 02:24:03 crc kubenswrapper[4665]: E1205 02:24:03.737114 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="491848a0-bc17-492c-9be1-6fec08c2448f" containerName="extract-content" Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.737126 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="491848a0-bc17-492c-9be1-6fec08c2448f" containerName="extract-content" Dec 05 02:24:03 crc kubenswrapper[4665]: E1205 02:24:03.737145 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db07d28d-034a-4f2f-a4aa-0d58cef39123" containerName="extract-utilities" Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.737151 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="db07d28d-034a-4f2f-a4aa-0d58cef39123" containerName="extract-utilities" Dec 05 02:24:03 crc kubenswrapper[4665]: E1205 02:24:03.737165 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="491848a0-bc17-492c-9be1-6fec08c2448f" containerName="registry-server" Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.737172 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="491848a0-bc17-492c-9be1-6fec08c2448f" containerName="registry-server" Dec 05 
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.737195 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="db07d28d-034a-4f2f-a4aa-0d58cef39123" containerName="extract-content"
Dec 05 02:24:03 crc kubenswrapper[4665]: E1205 02:24:03.737207 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db07d28d-034a-4f2f-a4aa-0d58cef39123" containerName="registry-server"
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.737214 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="db07d28d-034a-4f2f-a4aa-0d58cef39123" containerName="registry-server"
Dec 05 02:24:03 crc kubenswrapper[4665]: E1205 02:24:03.737231 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="491848a0-bc17-492c-9be1-6fec08c2448f" containerName="extract-utilities"
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.737239 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="491848a0-bc17-492c-9be1-6fec08c2448f" containerName="extract-utilities"
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.737468 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="db07d28d-034a-4f2f-a4aa-0d58cef39123" containerName="registry-server"
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.737487 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="491848a0-bc17-492c-9be1-6fec08c2448f" containerName="registry-server"
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.738840 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m9mx9"
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.749465 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m9mx9"]
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.750797 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qtjxd\" (UniqueName: \"kubernetes.io/projected/134a22b0-3ce8-413f-ad12-ca8060510b3c-kube-api-access-qtjxd\") pod \"certified-operators-m9mx9\" (UID: \"134a22b0-3ce8-413f-ad12-ca8060510b3c\") " pod="openshift-marketplace/certified-operators-m9mx9"
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.750854 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/134a22b0-3ce8-413f-ad12-ca8060510b3c-catalog-content\") pod \"certified-operators-m9mx9\" (UID: \"134a22b0-3ce8-413f-ad12-ca8060510b3c\") " pod="openshift-marketplace/certified-operators-m9mx9"
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.750874 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/134a22b0-3ce8-413f-ad12-ca8060510b3c-utilities\") pod \"certified-operators-m9mx9\" (UID: \"134a22b0-3ce8-413f-ad12-ca8060510b3c\") " pod="openshift-marketplace/certified-operators-m9mx9"
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.828661 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.852407 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a3dcea46-0cf1-441d-84ba-0b327c396844-config-data\") pod \"a3dcea46-0cf1-441d-84ba-0b327c396844\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") "
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.852540 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a3dcea46-0cf1-441d-84ba-0b327c396844-ssh-key\") pod \"a3dcea46-0cf1-441d-84ba-0b327c396844\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") "
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.852629 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/a3dcea46-0cf1-441d-84ba-0b327c396844-test-operator-ephemeral-temporary\") pod \"a3dcea46-0cf1-441d-84ba-0b327c396844\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") "
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.852690 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a3dcea46-0cf1-441d-84ba-0b327c396844-openstack-config\") pod \"a3dcea46-0cf1-441d-84ba-0b327c396844\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") "
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.852711 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ccwvt\" (UniqueName: \"kubernetes.io/projected/a3dcea46-0cf1-441d-84ba-0b327c396844-kube-api-access-ccwvt\") pod \"a3dcea46-0cf1-441d-84ba-0b327c396844\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") "
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.852776 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a3dcea46-0cf1-441d-84ba-0b327c396844-openstack-config-secret\") pod \"a3dcea46-0cf1-441d-84ba-0b327c396844\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") "
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.852823 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"a3dcea46-0cf1-441d-84ba-0b327c396844\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") "
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.852844 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/a3dcea46-0cf1-441d-84ba-0b327c396844-test-operator-ephemeral-workdir\") pod \"a3dcea46-0cf1-441d-84ba-0b327c396844\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") "
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.852870 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/a3dcea46-0cf1-441d-84ba-0b327c396844-ca-certs\") pod \"a3dcea46-0cf1-441d-84ba-0b327c396844\" (UID: \"a3dcea46-0cf1-441d-84ba-0b327c396844\") "
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.853254 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qtjxd\" (UniqueName: \"kubernetes.io/projected/134a22b0-3ce8-413f-ad12-ca8060510b3c-kube-api-access-qtjxd\") pod \"certified-operators-m9mx9\" (UID: \"134a22b0-3ce8-413f-ad12-ca8060510b3c\") " pod="openshift-marketplace/certified-operators-m9mx9"
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.853346 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/134a22b0-3ce8-413f-ad12-ca8060510b3c-catalog-content\") pod \"certified-operators-m9mx9\" (UID: \"134a22b0-3ce8-413f-ad12-ca8060510b3c\") " pod="openshift-marketplace/certified-operators-m9mx9"
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.853367 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/134a22b0-3ce8-413f-ad12-ca8060510b3c-utilities\") pod \"certified-operators-m9mx9\" (UID: \"134a22b0-3ce8-413f-ad12-ca8060510b3c\") " pod="openshift-marketplace/certified-operators-m9mx9"
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.854019 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/134a22b0-3ce8-413f-ad12-ca8060510b3c-catalog-content\") pod \"certified-operators-m9mx9\" (UID: \"134a22b0-3ce8-413f-ad12-ca8060510b3c\") " pod="openshift-marketplace/certified-operators-m9mx9"
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.854046 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/134a22b0-3ce8-413f-ad12-ca8060510b3c-utilities\") pod \"certified-operators-m9mx9\" (UID: \"134a22b0-3ce8-413f-ad12-ca8060510b3c\") " pod="openshift-marketplace/certified-operators-m9mx9"
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.854666 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3dcea46-0cf1-441d-84ba-0b327c396844-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "a3dcea46-0cf1-441d-84ba-0b327c396844" (UID: "a3dcea46-0cf1-441d-84ba-0b327c396844"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.856786 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3dcea46-0cf1-441d-84ba-0b327c396844-config-data" (OuterVolumeSpecName: "config-data") pod "a3dcea46-0cf1-441d-84ba-0b327c396844" (UID: "a3dcea46-0cf1-441d-84ba-0b327c396844"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.879486 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3dcea46-0cf1-441d-84ba-0b327c396844-kube-api-access-ccwvt" (OuterVolumeSpecName: "kube-api-access-ccwvt") pod "a3dcea46-0cf1-441d-84ba-0b327c396844" (UID: "a3dcea46-0cf1-441d-84ba-0b327c396844"). InnerVolumeSpecName "kube-api-access-ccwvt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.882164 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage05-crc" (OuterVolumeSpecName: "test-operator-logs") pod "a3dcea46-0cf1-441d-84ba-0b327c396844" (UID: "a3dcea46-0cf1-441d-84ba-0b327c396844"). InnerVolumeSpecName "local-storage05-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.889251 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3dcea46-0cf1-441d-84ba-0b327c396844-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "a3dcea46-0cf1-441d-84ba-0b327c396844" (UID: "a3dcea46-0cf1-441d-84ba-0b327c396844"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.903108 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qtjxd\" (UniqueName: \"kubernetes.io/projected/134a22b0-3ce8-413f-ad12-ca8060510b3c-kube-api-access-qtjxd\") pod \"certified-operators-m9mx9\" (UID: \"134a22b0-3ce8-413f-ad12-ca8060510b3c\") " pod="openshift-marketplace/certified-operators-m9mx9"
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.909556 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3dcea46-0cf1-441d-84ba-0b327c396844-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "a3dcea46-0cf1-441d-84ba-0b327c396844" (UID: "a3dcea46-0cf1-441d-84ba-0b327c396844"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.925257 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3dcea46-0cf1-441d-84ba-0b327c396844-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "a3dcea46-0cf1-441d-84ba-0b327c396844" (UID: "a3dcea46-0cf1-441d-84ba-0b327c396844"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.939782 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3dcea46-0cf1-441d-84ba-0b327c396844-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "a3dcea46-0cf1-441d-84ba-0b327c396844" (UID: "a3dcea46-0cf1-441d-84ba-0b327c396844"). InnerVolumeSpecName "ca-certs".
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.955377 4665 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/a3dcea46-0cf1-441d-84ba-0b327c396844-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.960889 4665 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" " Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.960927 4665 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/a3dcea46-0cf1-441d-84ba-0b327c396844-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\"" Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.960955 4665 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/a3dcea46-0cf1-441d-84ba-0b327c396844-ca-certs\") on node \"crc\" DevicePath \"\"" Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.960975 4665 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a3dcea46-0cf1-441d-84ba-0b327c396844-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.960989 4665 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/a3dcea46-0cf1-441d-84ba-0b327c396844-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.961002 4665 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/a3dcea46-0cf1-441d-84ba-0b327c396844-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\"" Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.961017 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ccwvt\" (UniqueName: \"kubernetes.io/projected/a3dcea46-0cf1-441d-84ba-0b327c396844-kube-api-access-ccwvt\") on node \"crc\" DevicePath \"\"" Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.972024 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3dcea46-0cf1-441d-84ba-0b327c396844-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "a3dcea46-0cf1-441d-84ba-0b327c396844" (UID: "a3dcea46-0cf1-441d-84ba-0b327c396844"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 02:24:03 crc kubenswrapper[4665]: I1205 02:24:03.985029 4665 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage05-crc" (UniqueName: "kubernetes.io/local-volume/local-storage05-crc") on node "crc" Dec 05 02:24:04 crc kubenswrapper[4665]: I1205 02:24:04.062907 4665 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/a3dcea46-0cf1-441d-84ba-0b327c396844-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 05 02:24:04 crc kubenswrapper[4665]: I1205 02:24:04.062952 4665 reconciler_common.go:293] "Volume detached for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") on node \"crc\" DevicePath \"\"" Dec 05 02:24:04 crc kubenswrapper[4665]: I1205 02:24:04.142446 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-m9mx9" Dec 05 02:24:04 crc kubenswrapper[4665]: I1205 02:24:04.441674 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"a3dcea46-0cf1-441d-84ba-0b327c396844","Type":"ContainerDied","Data":"6a7a4ca74103ec0386864df1bf305d5d63d2648735a2a89ba040c2779e71a22d"} Dec 05 02:24:04 crc kubenswrapper[4665]: I1205 02:24:04.441951 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a7a4ca74103ec0386864df1bf305d5d63d2648735a2a89ba040c2779e71a22d" Dec 05 02:24:04 crc kubenswrapper[4665]: I1205 02:24:04.441775 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 02:24:04 crc kubenswrapper[4665]: I1205 02:24:04.745428 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-m9mx9"] Dec 05 02:24:05 crc kubenswrapper[4665]: I1205 02:24:05.469232 4665 generic.go:334] "Generic (PLEG): container finished" podID="134a22b0-3ce8-413f-ad12-ca8060510b3c" containerID="f752fc53879b45b9c5a99138676cff51c90d8a6002a1d526c08d7113f44bece6" exitCode=0 Dec 05 02:24:05 crc kubenswrapper[4665]: I1205 02:24:05.469331 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m9mx9" event={"ID":"134a22b0-3ce8-413f-ad12-ca8060510b3c","Type":"ContainerDied","Data":"f752fc53879b45b9c5a99138676cff51c90d8a6002a1d526c08d7113f44bece6"} Dec 05 02:24:05 crc kubenswrapper[4665]: I1205 02:24:05.469758 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m9mx9" event={"ID":"134a22b0-3ce8-413f-ad12-ca8060510b3c","Type":"ContainerStarted","Data":"a59d8b4c98cfc54dd2fb0e5d8405320a2a4690d58fb964fd46691c79b5312a25"} Dec 05 02:24:06 crc kubenswrapper[4665]: I1205 02:24:06.482325 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m9mx9" event={"ID":"134a22b0-3ce8-413f-ad12-ca8060510b3c","Type":"ContainerStarted","Data":"92689629d050a48371edf5494100a090329cde77b135a83625f480e873da4e79"} Dec 05 02:24:07 crc kubenswrapper[4665]: I1205 02:24:07.492433 4665 generic.go:334] "Generic (PLEG): container finished" podID="134a22b0-3ce8-413f-ad12-ca8060510b3c" containerID="92689629d050a48371edf5494100a090329cde77b135a83625f480e873da4e79" exitCode=0 Dec 05 02:24:07 crc kubenswrapper[4665]: I1205 02:24:07.492476 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m9mx9" event={"ID":"134a22b0-3ce8-413f-ad12-ca8060510b3c","Type":"ContainerDied","Data":"92689629d050a48371edf5494100a090329cde77b135a83625f480e873da4e79"} Dec 05 02:24:08 crc kubenswrapper[4665]: I1205 02:24:08.503515 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m9mx9" event={"ID":"134a22b0-3ce8-413f-ad12-ca8060510b3c","Type":"ContainerStarted","Data":"f7906e8832338dc29be85a9b1306f6dd81bbfa223d5749f436ab687d26607d1e"} Dec 05 02:24:08 crc kubenswrapper[4665]: I1205 02:24:08.540981 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-m9mx9" podStartSLOduration=3.131023889 podStartE2EDuration="5.540951869s" podCreationTimestamp="2025-12-05 02:24:03 +0000 UTC" firstStartedPulling="2025-12-05 02:24:05.471851233 +0000 UTC m=+4420.811243532" lastFinishedPulling="2025-12-05 02:24:07.881779213 +0000 UTC m=+4423.221171512" 
observedRunningTime="2025-12-05 02:24:08.531753516 +0000 UTC m=+4423.871145825" watchObservedRunningTime="2025-12-05 02:24:08.540951869 +0000 UTC m=+4423.880344198" Dec 05 02:24:09 crc kubenswrapper[4665]: I1205 02:24:09.964444 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 02:24:09 crc kubenswrapper[4665]: E1205 02:24:09.965185 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3dcea46-0cf1-441d-84ba-0b327c396844" containerName="tempest-tests-tempest-tests-runner" Dec 05 02:24:09 crc kubenswrapper[4665]: I1205 02:24:09.965200 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3dcea46-0cf1-441d-84ba-0b327c396844" containerName="tempest-tests-tempest-tests-runner" Dec 05 02:24:09 crc kubenswrapper[4665]: I1205 02:24:09.965439 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3dcea46-0cf1-441d-84ba-0b327c396844" containerName="tempest-tests-tempest-tests-runner" Dec 05 02:24:09 crc kubenswrapper[4665]: I1205 02:24:09.966140 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 02:24:09 crc kubenswrapper[4665]: I1205 02:24:09.996505 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 02:24:10 crc kubenswrapper[4665]: I1205 02:24:10.003324 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-6sdph" Dec 05 02:24:10 crc kubenswrapper[4665]: I1205 02:24:10.101492 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cfdb58d0-875d-49d7-82ef-13bc4785a25c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 02:24:10 crc kubenswrapper[4665]: I1205 02:24:10.101739 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-frnqk\" (UniqueName: \"kubernetes.io/projected/cfdb58d0-875d-49d7-82ef-13bc4785a25c-kube-api-access-frnqk\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cfdb58d0-875d-49d7-82ef-13bc4785a25c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 02:24:10 crc kubenswrapper[4665]: I1205 02:24:10.203669 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-frnqk\" (UniqueName: \"kubernetes.io/projected/cfdb58d0-875d-49d7-82ef-13bc4785a25c-kube-api-access-frnqk\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cfdb58d0-875d-49d7-82ef-13bc4785a25c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 02:24:10 crc kubenswrapper[4665]: I1205 02:24:10.204220 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cfdb58d0-875d-49d7-82ef-13bc4785a25c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 02:24:10 crc kubenswrapper[4665]: I1205 02:24:10.205167 4665 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage05-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cfdb58d0-875d-49d7-82ef-13bc4785a25c\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 02:24:10 crc kubenswrapper[4665]: I1205 02:24:10.226623 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-frnqk\" (UniqueName: \"kubernetes.io/projected/cfdb58d0-875d-49d7-82ef-13bc4785a25c-kube-api-access-frnqk\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cfdb58d0-875d-49d7-82ef-13bc4785a25c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 02:24:10 crc kubenswrapper[4665]: I1205 02:24:10.254819 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"cfdb58d0-875d-49d7-82ef-13bc4785a25c\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 02:24:10 crc kubenswrapper[4665]: I1205 02:24:10.334452 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" Dec 05 02:24:10 crc kubenswrapper[4665]: I1205 02:24:10.778087 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"] Dec 05 02:24:11 crc kubenswrapper[4665]: I1205 02:24:11.531846 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"cfdb58d0-875d-49d7-82ef-13bc4785a25c","Type":"ContainerStarted","Data":"e29a4079cfbf96705cf513da51692511fd47de958140ea1ce8367c65f315a46e"} Dec 05 02:24:12 crc kubenswrapper[4665]: I1205 02:24:12.542457 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"cfdb58d0-875d-49d7-82ef-13bc4785a25c","Type":"ContainerStarted","Data":"10992a38f2f9b9e67f7ec93b9f56f462885ae2d5c8e2a6d17edb7120cb8537cd"} Dec 05 02:24:12 crc kubenswrapper[4665]: I1205 02:24:12.563679 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=2.284332328 podStartE2EDuration="3.563659896s" podCreationTimestamp="2025-12-05 02:24:09 +0000 UTC" firstStartedPulling="2025-12-05 02:24:10.774516634 +0000 UTC m=+4426.113908963" lastFinishedPulling="2025-12-05 02:24:12.053844222 +0000 UTC m=+4427.393236531" observedRunningTime="2025-12-05 02:24:12.557060627 +0000 UTC m=+4427.896452956" watchObservedRunningTime="2025-12-05 02:24:12.563659896 +0000 UTC m=+4427.903052205" Dec 05 02:24:14 crc kubenswrapper[4665]: I1205 02:24:14.143666 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-m9mx9" Dec 05 02:24:14 crc kubenswrapper[4665]: I1205 02:24:14.143999 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-m9mx9" Dec 05 02:24:14 crc kubenswrapper[4665]: I1205 02:24:14.386377 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-m9mx9" Dec 05 02:24:14 crc kubenswrapper[4665]: I1205 02:24:14.596539 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/certified-operators-m9mx9" Dec 05 02:24:14 crc kubenswrapper[4665]: I1205 02:24:14.657267 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m9mx9"] Dec 05 02:24:16 crc kubenswrapper[4665]: I1205 02:24:16.586569 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-m9mx9" podUID="134a22b0-3ce8-413f-ad12-ca8060510b3c" containerName="registry-server" containerID="cri-o://f7906e8832338dc29be85a9b1306f6dd81bbfa223d5749f436ab687d26607d1e" gracePeriod=2 Dec 05 02:24:17 crc kubenswrapper[4665]: I1205 02:24:17.263583 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-m9mx9" Dec 05 02:24:17 crc kubenswrapper[4665]: I1205 02:24:17.349226 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/134a22b0-3ce8-413f-ad12-ca8060510b3c-catalog-content\") pod \"134a22b0-3ce8-413f-ad12-ca8060510b3c\" (UID: \"134a22b0-3ce8-413f-ad12-ca8060510b3c\") " Dec 05 02:24:17 crc kubenswrapper[4665]: I1205 02:24:17.349264 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/134a22b0-3ce8-413f-ad12-ca8060510b3c-utilities\") pod \"134a22b0-3ce8-413f-ad12-ca8060510b3c\" (UID: \"134a22b0-3ce8-413f-ad12-ca8060510b3c\") " Dec 05 02:24:17 crc kubenswrapper[4665]: I1205 02:24:17.349376 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qtjxd\" (UniqueName: \"kubernetes.io/projected/134a22b0-3ce8-413f-ad12-ca8060510b3c-kube-api-access-qtjxd\") pod \"134a22b0-3ce8-413f-ad12-ca8060510b3c\" (UID: \"134a22b0-3ce8-413f-ad12-ca8060510b3c\") " Dec 05 02:24:17 crc kubenswrapper[4665]: I1205 02:24:17.350540 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/134a22b0-3ce8-413f-ad12-ca8060510b3c-utilities" (OuterVolumeSpecName: "utilities") pod "134a22b0-3ce8-413f-ad12-ca8060510b3c" (UID: "134a22b0-3ce8-413f-ad12-ca8060510b3c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:24:17 crc kubenswrapper[4665]: I1205 02:24:17.398249 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/134a22b0-3ce8-413f-ad12-ca8060510b3c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "134a22b0-3ce8-413f-ad12-ca8060510b3c" (UID: "134a22b0-3ce8-413f-ad12-ca8060510b3c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:24:17 crc kubenswrapper[4665]: I1205 02:24:17.404951 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/134a22b0-3ce8-413f-ad12-ca8060510b3c-kube-api-access-qtjxd" (OuterVolumeSpecName: "kube-api-access-qtjxd") pod "134a22b0-3ce8-413f-ad12-ca8060510b3c" (UID: "134a22b0-3ce8-413f-ad12-ca8060510b3c"). InnerVolumeSpecName "kube-api-access-qtjxd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:24:17 crc kubenswrapper[4665]: I1205 02:24:17.451192 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/134a22b0-3ce8-413f-ad12-ca8060510b3c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 02:24:17 crc kubenswrapper[4665]: I1205 02:24:17.451227 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/134a22b0-3ce8-413f-ad12-ca8060510b3c-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 02:24:17 crc kubenswrapper[4665]: I1205 02:24:17.451237 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qtjxd\" (UniqueName: \"kubernetes.io/projected/134a22b0-3ce8-413f-ad12-ca8060510b3c-kube-api-access-qtjxd\") on node \"crc\" DevicePath \"\"" Dec 05 02:24:17 crc kubenswrapper[4665]: I1205 02:24:17.602953 4665 generic.go:334] "Generic (PLEG): container finished" podID="134a22b0-3ce8-413f-ad12-ca8060510b3c" containerID="f7906e8832338dc29be85a9b1306f6dd81bbfa223d5749f436ab687d26607d1e" exitCode=0 Dec 05 02:24:17 crc kubenswrapper[4665]: I1205 02:24:17.603001 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m9mx9" event={"ID":"134a22b0-3ce8-413f-ad12-ca8060510b3c","Type":"ContainerDied","Data":"f7906e8832338dc29be85a9b1306f6dd81bbfa223d5749f436ab687d26607d1e"} Dec 05 02:24:17 crc kubenswrapper[4665]: I1205 02:24:17.603024 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-m9mx9" event={"ID":"134a22b0-3ce8-413f-ad12-ca8060510b3c","Type":"ContainerDied","Data":"a59d8b4c98cfc54dd2fb0e5d8405320a2a4690d58fb964fd46691c79b5312a25"} Dec 05 02:24:17 crc kubenswrapper[4665]: I1205 02:24:17.603058 4665 scope.go:117] "RemoveContainer" containerID="f7906e8832338dc29be85a9b1306f6dd81bbfa223d5749f436ab687d26607d1e" Dec 05 02:24:17 crc kubenswrapper[4665]: I1205 02:24:17.603182 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-m9mx9" Dec 05 02:24:17 crc kubenswrapper[4665]: I1205 02:24:17.633027 4665 scope.go:117] "RemoveContainer" containerID="92689629d050a48371edf5494100a090329cde77b135a83625f480e873da4e79" Dec 05 02:24:17 crc kubenswrapper[4665]: I1205 02:24:17.651639 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-m9mx9"] Dec 05 02:24:17 crc kubenswrapper[4665]: I1205 02:24:17.657965 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-m9mx9"] Dec 05 02:24:17 crc kubenswrapper[4665]: I1205 02:24:17.661546 4665 scope.go:117] "RemoveContainer" containerID="f752fc53879b45b9c5a99138676cff51c90d8a6002a1d526c08d7113f44bece6" Dec 05 02:24:17 crc kubenswrapper[4665]: I1205 02:24:17.699100 4665 scope.go:117] "RemoveContainer" containerID="f7906e8832338dc29be85a9b1306f6dd81bbfa223d5749f436ab687d26607d1e" Dec 05 02:24:17 crc kubenswrapper[4665]: E1205 02:24:17.699598 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7906e8832338dc29be85a9b1306f6dd81bbfa223d5749f436ab687d26607d1e\": container with ID starting with f7906e8832338dc29be85a9b1306f6dd81bbfa223d5749f436ab687d26607d1e not found: ID does not exist" containerID="f7906e8832338dc29be85a9b1306f6dd81bbfa223d5749f436ab687d26607d1e" Dec 05 02:24:17 crc kubenswrapper[4665]: I1205 02:24:17.699639 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7906e8832338dc29be85a9b1306f6dd81bbfa223d5749f436ab687d26607d1e"} err="failed to get container status \"f7906e8832338dc29be85a9b1306f6dd81bbfa223d5749f436ab687d26607d1e\": rpc error: code = NotFound desc = could not find container \"f7906e8832338dc29be85a9b1306f6dd81bbfa223d5749f436ab687d26607d1e\": container with ID starting with f7906e8832338dc29be85a9b1306f6dd81bbfa223d5749f436ab687d26607d1e not found: ID does not exist" Dec 05 02:24:17 crc kubenswrapper[4665]: I1205 02:24:17.699665 4665 scope.go:117] "RemoveContainer" containerID="92689629d050a48371edf5494100a090329cde77b135a83625f480e873da4e79" Dec 05 02:24:17 crc kubenswrapper[4665]: E1205 02:24:17.700114 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92689629d050a48371edf5494100a090329cde77b135a83625f480e873da4e79\": container with ID starting with 92689629d050a48371edf5494100a090329cde77b135a83625f480e873da4e79 not found: ID does not exist" containerID="92689629d050a48371edf5494100a090329cde77b135a83625f480e873da4e79" Dec 05 02:24:17 crc kubenswrapper[4665]: I1205 02:24:17.700138 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92689629d050a48371edf5494100a090329cde77b135a83625f480e873da4e79"} err="failed to get container status \"92689629d050a48371edf5494100a090329cde77b135a83625f480e873da4e79\": rpc error: code = NotFound desc = could not find container \"92689629d050a48371edf5494100a090329cde77b135a83625f480e873da4e79\": container with ID starting with 92689629d050a48371edf5494100a090329cde77b135a83625f480e873da4e79 not found: ID does not exist" Dec 05 02:24:17 crc kubenswrapper[4665]: I1205 02:24:17.700158 4665 scope.go:117] "RemoveContainer" containerID="f752fc53879b45b9c5a99138676cff51c90d8a6002a1d526c08d7113f44bece6" Dec 05 02:24:17 crc kubenswrapper[4665]: E1205 02:24:17.700392 4665 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"f752fc53879b45b9c5a99138676cff51c90d8a6002a1d526c08d7113f44bece6\": container with ID starting with f752fc53879b45b9c5a99138676cff51c90d8a6002a1d526c08d7113f44bece6 not found: ID does not exist" containerID="f752fc53879b45b9c5a99138676cff51c90d8a6002a1d526c08d7113f44bece6" Dec 05 02:24:17 crc kubenswrapper[4665]: I1205 02:24:17.700410 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f752fc53879b45b9c5a99138676cff51c90d8a6002a1d526c08d7113f44bece6"} err="failed to get container status \"f752fc53879b45b9c5a99138676cff51c90d8a6002a1d526c08d7113f44bece6\": rpc error: code = NotFound desc = could not find container \"f752fc53879b45b9c5a99138676cff51c90d8a6002a1d526c08d7113f44bece6\": container with ID starting with f752fc53879b45b9c5a99138676cff51c90d8a6002a1d526c08d7113f44bece6 not found: ID does not exist" Dec 05 02:24:18 crc kubenswrapper[4665]: I1205 02:24:18.916222 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="134a22b0-3ce8-413f-ad12-ca8060510b3c" path="/var/lib/kubelet/pods/134a22b0-3ce8-413f-ad12-ca8060510b3c/volumes" Dec 05 02:24:37 crc kubenswrapper[4665]: I1205 02:24:37.883149 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-s297g/must-gather-bd7lg"] Dec 05 02:24:37 crc kubenswrapper[4665]: E1205 02:24:37.884137 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="134a22b0-3ce8-413f-ad12-ca8060510b3c" containerName="registry-server" Dec 05 02:24:37 crc kubenswrapper[4665]: I1205 02:24:37.884153 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="134a22b0-3ce8-413f-ad12-ca8060510b3c" containerName="registry-server" Dec 05 02:24:37 crc kubenswrapper[4665]: E1205 02:24:37.884181 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="134a22b0-3ce8-413f-ad12-ca8060510b3c" containerName="extract-content" Dec 05 02:24:37 crc kubenswrapper[4665]: I1205 02:24:37.884190 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="134a22b0-3ce8-413f-ad12-ca8060510b3c" containerName="extract-content" Dec 05 02:24:37 crc kubenswrapper[4665]: E1205 02:24:37.884220 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="134a22b0-3ce8-413f-ad12-ca8060510b3c" containerName="extract-utilities" Dec 05 02:24:37 crc kubenswrapper[4665]: I1205 02:24:37.884229 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="134a22b0-3ce8-413f-ad12-ca8060510b3c" containerName="extract-utilities" Dec 05 02:24:37 crc kubenswrapper[4665]: I1205 02:24:37.884524 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="134a22b0-3ce8-413f-ad12-ca8060510b3c" containerName="registry-server" Dec 05 02:24:37 crc kubenswrapper[4665]: I1205 02:24:37.885759 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-s297g/must-gather-bd7lg" Dec 05 02:24:37 crc kubenswrapper[4665]: I1205 02:24:37.894103 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-s297g"/"kube-root-ca.crt" Dec 05 02:24:37 crc kubenswrapper[4665]: I1205 02:24:37.894134 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-s297g"/"default-dockercfg-hxmqw" Dec 05 02:24:37 crc kubenswrapper[4665]: I1205 02:24:37.894332 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-s297g"/"openshift-service-ca.crt" Dec 05 02:24:37 crc kubenswrapper[4665]: I1205 02:24:37.896827 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-s297g/must-gather-bd7lg"] Dec 05 02:24:37 crc kubenswrapper[4665]: I1205 02:24:37.936222 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/25b736e7-0900-478d-a341-d41d78220399-must-gather-output\") pod \"must-gather-bd7lg\" (UID: \"25b736e7-0900-478d-a341-d41d78220399\") " pod="openshift-must-gather-s297g/must-gather-bd7lg" Dec 05 02:24:37 crc kubenswrapper[4665]: I1205 02:24:37.936558 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5l7dl\" (UniqueName: \"kubernetes.io/projected/25b736e7-0900-478d-a341-d41d78220399-kube-api-access-5l7dl\") pod \"must-gather-bd7lg\" (UID: \"25b736e7-0900-478d-a341-d41d78220399\") " pod="openshift-must-gather-s297g/must-gather-bd7lg" Dec 05 02:24:38 crc kubenswrapper[4665]: I1205 02:24:38.038370 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/25b736e7-0900-478d-a341-d41d78220399-must-gather-output\") pod \"must-gather-bd7lg\" (UID: \"25b736e7-0900-478d-a341-d41d78220399\") " pod="openshift-must-gather-s297g/must-gather-bd7lg" Dec 05 02:24:38 crc kubenswrapper[4665]: I1205 02:24:38.038515 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5l7dl\" (UniqueName: \"kubernetes.io/projected/25b736e7-0900-478d-a341-d41d78220399-kube-api-access-5l7dl\") pod \"must-gather-bd7lg\" (UID: \"25b736e7-0900-478d-a341-d41d78220399\") " pod="openshift-must-gather-s297g/must-gather-bd7lg" Dec 05 02:24:38 crc kubenswrapper[4665]: I1205 02:24:38.038809 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/25b736e7-0900-478d-a341-d41d78220399-must-gather-output\") pod \"must-gather-bd7lg\" (UID: \"25b736e7-0900-478d-a341-d41d78220399\") " pod="openshift-must-gather-s297g/must-gather-bd7lg" Dec 05 02:24:38 crc kubenswrapper[4665]: I1205 02:24:38.058570 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5l7dl\" (UniqueName: \"kubernetes.io/projected/25b736e7-0900-478d-a341-d41d78220399-kube-api-access-5l7dl\") pod \"must-gather-bd7lg\" (UID: \"25b736e7-0900-478d-a341-d41d78220399\") " pod="openshift-must-gather-s297g/must-gather-bd7lg" Dec 05 02:24:38 crc kubenswrapper[4665]: I1205 02:24:38.209094 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-s297g/must-gather-bd7lg" Dec 05 02:24:38 crc kubenswrapper[4665]: I1205 02:24:38.695909 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-s297g/must-gather-bd7lg"] Dec 05 02:24:38 crc kubenswrapper[4665]: I1205 02:24:38.909705 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-s297g/must-gather-bd7lg" event={"ID":"25b736e7-0900-478d-a341-d41d78220399","Type":"ContainerStarted","Data":"1e56f5070378ce22bab0ace118c6d03c8595d532bd6fe237e42fb48494325435"} Dec 05 02:24:39 crc kubenswrapper[4665]: I1205 02:24:39.420095 4665 scope.go:117] "RemoveContainer" containerID="dcb230ce19efdd545231a804f7a9dfa3e35e085704d747f901d71d631bc43539" Dec 05 02:24:39 crc kubenswrapper[4665]: I1205 02:24:39.455031 4665 scope.go:117] "RemoveContainer" containerID="1f66e9ea64ed1314aeb7cf4bfa3cfb722938b04d09edc6678c0e3f12296f94e5" Dec 05 02:24:39 crc kubenswrapper[4665]: I1205 02:24:39.485593 4665 scope.go:117] "RemoveContainer" containerID="1e176b3dff520b212db425a0ffa4a9d7d2ca38bcaf1976249351d9173dac8427" Dec 05 02:24:43 crc kubenswrapper[4665]: I1205 02:24:43.957619 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-s297g/must-gather-bd7lg" event={"ID":"25b736e7-0900-478d-a341-d41d78220399","Type":"ContainerStarted","Data":"a26da79a229840ec4ad97e7f56e4502795b9ef57d516059e689a9bd3ccadccd8"} Dec 05 02:24:44 crc kubenswrapper[4665]: I1205 02:24:44.966279 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-s297g/must-gather-bd7lg" event={"ID":"25b736e7-0900-478d-a341-d41d78220399","Type":"ContainerStarted","Data":"7e7c5f4be7efb7ae1bd04ee74f3456ddce473de1354941cf2ce584d4faa45873"} Dec 05 02:24:44 crc kubenswrapper[4665]: I1205 02:24:44.987067 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-s297g/must-gather-bd7lg" podStartSLOduration=3.47971372 podStartE2EDuration="7.987051545s" podCreationTimestamp="2025-12-05 02:24:37 +0000 UTC" firstStartedPulling="2025-12-05 02:24:38.717283563 +0000 UTC m=+4454.056675862" lastFinishedPulling="2025-12-05 02:24:43.224621388 +0000 UTC m=+4458.564013687" observedRunningTime="2025-12-05 02:24:44.978809196 +0000 UTC m=+4460.318201495" watchObservedRunningTime="2025-12-05 02:24:44.987051545 +0000 UTC m=+4460.326443844" Dec 05 02:24:48 crc kubenswrapper[4665]: I1205 02:24:48.252433 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-s297g/crc-debug-lst27"] Dec 05 02:24:48 crc kubenswrapper[4665]: I1205 02:24:48.254116 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-s297g/crc-debug-lst27" Dec 05 02:24:48 crc kubenswrapper[4665]: I1205 02:24:48.367393 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a3e34493-cc20-4a8a-a276-1253e6a833f4-host\") pod \"crc-debug-lst27\" (UID: \"a3e34493-cc20-4a8a-a276-1253e6a833f4\") " pod="openshift-must-gather-s297g/crc-debug-lst27" Dec 05 02:24:48 crc kubenswrapper[4665]: I1205 02:24:48.367842 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j4m8f\" (UniqueName: \"kubernetes.io/projected/a3e34493-cc20-4a8a-a276-1253e6a833f4-kube-api-access-j4m8f\") pod \"crc-debug-lst27\" (UID: \"a3e34493-cc20-4a8a-a276-1253e6a833f4\") " pod="openshift-must-gather-s297g/crc-debug-lst27" Dec 05 02:24:48 crc kubenswrapper[4665]: I1205 02:24:48.469364 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j4m8f\" (UniqueName: \"kubernetes.io/projected/a3e34493-cc20-4a8a-a276-1253e6a833f4-kube-api-access-j4m8f\") pod \"crc-debug-lst27\" (UID: \"a3e34493-cc20-4a8a-a276-1253e6a833f4\") " pod="openshift-must-gather-s297g/crc-debug-lst27" Dec 05 02:24:48 crc kubenswrapper[4665]: I1205 02:24:48.469472 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a3e34493-cc20-4a8a-a276-1253e6a833f4-host\") pod \"crc-debug-lst27\" (UID: \"a3e34493-cc20-4a8a-a276-1253e6a833f4\") " pod="openshift-must-gather-s297g/crc-debug-lst27" Dec 05 02:24:48 crc kubenswrapper[4665]: I1205 02:24:48.469599 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a3e34493-cc20-4a8a-a276-1253e6a833f4-host\") pod \"crc-debug-lst27\" (UID: \"a3e34493-cc20-4a8a-a276-1253e6a833f4\") " pod="openshift-must-gather-s297g/crc-debug-lst27" Dec 05 02:24:48 crc kubenswrapper[4665]: I1205 02:24:48.509236 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j4m8f\" (UniqueName: \"kubernetes.io/projected/a3e34493-cc20-4a8a-a276-1253e6a833f4-kube-api-access-j4m8f\") pod \"crc-debug-lst27\" (UID: \"a3e34493-cc20-4a8a-a276-1253e6a833f4\") " pod="openshift-must-gather-s297g/crc-debug-lst27" Dec 05 02:24:48 crc kubenswrapper[4665]: I1205 02:24:48.573139 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-s297g/crc-debug-lst27" Dec 05 02:24:49 crc kubenswrapper[4665]: I1205 02:24:49.004651 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-s297g/crc-debug-lst27" event={"ID":"a3e34493-cc20-4a8a-a276-1253e6a833f4","Type":"ContainerStarted","Data":"74326dc847af73317b7ce1f6a90fd3747424262787c096d5f7c091f49c9dd5a4"} Dec 05 02:25:01 crc kubenswrapper[4665]: I1205 02:25:01.119581 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-s297g/crc-debug-lst27" event={"ID":"a3e34493-cc20-4a8a-a276-1253e6a833f4","Type":"ContainerStarted","Data":"3573fd1ce9542373371068acdaee1c50e5123a8b7b8a64939499497df7eaf461"} Dec 05 02:25:01 crc kubenswrapper[4665]: I1205 02:25:01.140576 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-s297g/crc-debug-lst27" podStartSLOduration=1.240494193 podStartE2EDuration="13.140559788s" podCreationTimestamp="2025-12-05 02:24:48 +0000 UTC" firstStartedPulling="2025-12-05 02:24:48.646947624 +0000 UTC m=+4463.986339923" lastFinishedPulling="2025-12-05 02:25:00.547013219 +0000 UTC m=+4475.886405518" observedRunningTime="2025-12-05 02:25:01.13317413 +0000 UTC m=+4476.472566439" watchObservedRunningTime="2025-12-05 02:25:01.140559788 +0000 UTC m=+4476.479952087" Dec 05 02:25:14 crc kubenswrapper[4665]: I1205 02:25:14.922134 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 02:25:14 crc kubenswrapper[4665]: I1205 02:25:14.922802 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 02:25:44 crc kubenswrapper[4665]: I1205 02:25:44.922535 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 02:25:44 crc kubenswrapper[4665]: I1205 02:25:44.923181 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 02:25:50 crc kubenswrapper[4665]: I1205 02:25:50.560526 4665 generic.go:334] "Generic (PLEG): container finished" podID="a3e34493-cc20-4a8a-a276-1253e6a833f4" containerID="3573fd1ce9542373371068acdaee1c50e5123a8b7b8a64939499497df7eaf461" exitCode=0 Dec 05 02:25:50 crc kubenswrapper[4665]: I1205 02:25:50.560623 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-s297g/crc-debug-lst27" event={"ID":"a3e34493-cc20-4a8a-a276-1253e6a833f4","Type":"ContainerDied","Data":"3573fd1ce9542373371068acdaee1c50e5123a8b7b8a64939499497df7eaf461"} Dec 05 02:25:51 crc kubenswrapper[4665]: I1205 02:25:51.682372 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-s297g/crc-debug-lst27" Dec 05 02:25:51 crc kubenswrapper[4665]: I1205 02:25:51.717459 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-s297g/crc-debug-lst27"] Dec 05 02:25:51 crc kubenswrapper[4665]: I1205 02:25:51.724992 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-s297g/crc-debug-lst27"] Dec 05 02:25:51 crc kubenswrapper[4665]: I1205 02:25:51.815766 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a3e34493-cc20-4a8a-a276-1253e6a833f4-host\") pod \"a3e34493-cc20-4a8a-a276-1253e6a833f4\" (UID: \"a3e34493-cc20-4a8a-a276-1253e6a833f4\") " Dec 05 02:25:51 crc kubenswrapper[4665]: I1205 02:25:51.815954 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j4m8f\" (UniqueName: \"kubernetes.io/projected/a3e34493-cc20-4a8a-a276-1253e6a833f4-kube-api-access-j4m8f\") pod \"a3e34493-cc20-4a8a-a276-1253e6a833f4\" (UID: \"a3e34493-cc20-4a8a-a276-1253e6a833f4\") " Dec 05 02:25:51 crc kubenswrapper[4665]: I1205 02:25:51.816092 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a3e34493-cc20-4a8a-a276-1253e6a833f4-host" (OuterVolumeSpecName: "host") pod "a3e34493-cc20-4a8a-a276-1253e6a833f4" (UID: "a3e34493-cc20-4a8a-a276-1253e6a833f4"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 02:25:51 crc kubenswrapper[4665]: I1205 02:25:51.816493 4665 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/a3e34493-cc20-4a8a-a276-1253e6a833f4-host\") on node \"crc\" DevicePath \"\"" Dec 05 02:25:51 crc kubenswrapper[4665]: I1205 02:25:51.822636 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3e34493-cc20-4a8a-a276-1253e6a833f4-kube-api-access-j4m8f" (OuterVolumeSpecName: "kube-api-access-j4m8f") pod "a3e34493-cc20-4a8a-a276-1253e6a833f4" (UID: "a3e34493-cc20-4a8a-a276-1253e6a833f4"). InnerVolumeSpecName "kube-api-access-j4m8f". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:25:51 crc kubenswrapper[4665]: I1205 02:25:51.919197 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j4m8f\" (UniqueName: \"kubernetes.io/projected/a3e34493-cc20-4a8a-a276-1253e6a833f4-kube-api-access-j4m8f\") on node \"crc\" DevicePath \"\"" Dec 05 02:25:52 crc kubenswrapper[4665]: I1205 02:25:52.579071 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-s297g/crc-debug-lst27" Dec 05 02:25:52 crc kubenswrapper[4665]: I1205 02:25:52.579191 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="74326dc847af73317b7ce1f6a90fd3747424262787c096d5f7c091f49c9dd5a4" Dec 05 02:25:52 crc kubenswrapper[4665]: I1205 02:25:52.920569 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3e34493-cc20-4a8a-a276-1253e6a833f4" path="/var/lib/kubelet/pods/a3e34493-cc20-4a8a-a276-1253e6a833f4/volumes" Dec 05 02:25:52 crc kubenswrapper[4665]: I1205 02:25:52.921682 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-s297g/crc-debug-sjj2l"] Dec 05 02:25:52 crc kubenswrapper[4665]: E1205 02:25:52.922673 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3e34493-cc20-4a8a-a276-1253e6a833f4" containerName="container-00" Dec 05 02:25:52 crc kubenswrapper[4665]: I1205 02:25:52.922722 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3e34493-cc20-4a8a-a276-1253e6a833f4" containerName="container-00" Dec 05 02:25:52 crc kubenswrapper[4665]: I1205 02:25:52.924100 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3e34493-cc20-4a8a-a276-1253e6a833f4" containerName="container-00" Dec 05 02:25:52 crc kubenswrapper[4665]: I1205 02:25:52.925189 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-s297g/crc-debug-sjj2l" Dec 05 02:25:53 crc kubenswrapper[4665]: I1205 02:25:53.040935 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/43594ca2-fd6b-41b2-a8dc-e0152110fc05-host\") pod \"crc-debug-sjj2l\" (UID: \"43594ca2-fd6b-41b2-a8dc-e0152110fc05\") " pod="openshift-must-gather-s297g/crc-debug-sjj2l" Dec 05 02:25:53 crc kubenswrapper[4665]: I1205 02:25:53.041273 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wk2d5\" (UniqueName: \"kubernetes.io/projected/43594ca2-fd6b-41b2-a8dc-e0152110fc05-kube-api-access-wk2d5\") pod \"crc-debug-sjj2l\" (UID: \"43594ca2-fd6b-41b2-a8dc-e0152110fc05\") " pod="openshift-must-gather-s297g/crc-debug-sjj2l" Dec 05 02:25:53 crc kubenswrapper[4665]: I1205 02:25:53.143773 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/43594ca2-fd6b-41b2-a8dc-e0152110fc05-host\") pod \"crc-debug-sjj2l\" (UID: \"43594ca2-fd6b-41b2-a8dc-e0152110fc05\") " pod="openshift-must-gather-s297g/crc-debug-sjj2l" Dec 05 02:25:53 crc kubenswrapper[4665]: I1205 02:25:53.143940 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wk2d5\" (UniqueName: \"kubernetes.io/projected/43594ca2-fd6b-41b2-a8dc-e0152110fc05-kube-api-access-wk2d5\") pod \"crc-debug-sjj2l\" (UID: \"43594ca2-fd6b-41b2-a8dc-e0152110fc05\") " pod="openshift-must-gather-s297g/crc-debug-sjj2l" Dec 05 02:25:53 crc kubenswrapper[4665]: I1205 02:25:53.143948 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/43594ca2-fd6b-41b2-a8dc-e0152110fc05-host\") pod \"crc-debug-sjj2l\" (UID: \"43594ca2-fd6b-41b2-a8dc-e0152110fc05\") " pod="openshift-must-gather-s297g/crc-debug-sjj2l" Dec 05 02:25:53 crc kubenswrapper[4665]: I1205 02:25:53.171585 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wk2d5\" 
(UniqueName: \"kubernetes.io/projected/43594ca2-fd6b-41b2-a8dc-e0152110fc05-kube-api-access-wk2d5\") pod \"crc-debug-sjj2l\" (UID: \"43594ca2-fd6b-41b2-a8dc-e0152110fc05\") " pod="openshift-must-gather-s297g/crc-debug-sjj2l" Dec 05 02:25:53 crc kubenswrapper[4665]: I1205 02:25:53.249462 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-s297g/crc-debug-sjj2l" Dec 05 02:25:53 crc kubenswrapper[4665]: I1205 02:25:53.587972 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-s297g/crc-debug-sjj2l" event={"ID":"43594ca2-fd6b-41b2-a8dc-e0152110fc05","Type":"ContainerStarted","Data":"68cf7b926e72f9b1a55fd09d6fe2c0d644a08a376c18398a200371696139b1bb"} Dec 05 02:25:53 crc kubenswrapper[4665]: I1205 02:25:53.588237 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-s297g/crc-debug-sjj2l" event={"ID":"43594ca2-fd6b-41b2-a8dc-e0152110fc05","Type":"ContainerStarted","Data":"67a659678728bd247d127a803cfe69622314968a9f1ef7eeeda3dcf86380e77c"} Dec 05 02:25:53 crc kubenswrapper[4665]: I1205 02:25:53.604733 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-s297g/crc-debug-sjj2l" podStartSLOduration=1.604714826 podStartE2EDuration="1.604714826s" podCreationTimestamp="2025-12-05 02:25:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 02:25:53.601124849 +0000 UTC m=+4528.940517158" watchObservedRunningTime="2025-12-05 02:25:53.604714826 +0000 UTC m=+4528.944107125" Dec 05 02:25:54 crc kubenswrapper[4665]: I1205 02:25:54.609188 4665 generic.go:334] "Generic (PLEG): container finished" podID="43594ca2-fd6b-41b2-a8dc-e0152110fc05" containerID="68cf7b926e72f9b1a55fd09d6fe2c0d644a08a376c18398a200371696139b1bb" exitCode=0 Dec 05 02:25:54 crc kubenswrapper[4665]: I1205 02:25:54.610272 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-s297g/crc-debug-sjj2l" event={"ID":"43594ca2-fd6b-41b2-a8dc-e0152110fc05","Type":"ContainerDied","Data":"68cf7b926e72f9b1a55fd09d6fe2c0d644a08a376c18398a200371696139b1bb"} Dec 05 02:25:55 crc kubenswrapper[4665]: I1205 02:25:55.737415 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-s297g/crc-debug-sjj2l" Dec 05 02:25:55 crc kubenswrapper[4665]: I1205 02:25:55.904012 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/43594ca2-fd6b-41b2-a8dc-e0152110fc05-host\") pod \"43594ca2-fd6b-41b2-a8dc-e0152110fc05\" (UID: \"43594ca2-fd6b-41b2-a8dc-e0152110fc05\") " Dec 05 02:25:55 crc kubenswrapper[4665]: I1205 02:25:55.904160 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wk2d5\" (UniqueName: \"kubernetes.io/projected/43594ca2-fd6b-41b2-a8dc-e0152110fc05-kube-api-access-wk2d5\") pod \"43594ca2-fd6b-41b2-a8dc-e0152110fc05\" (UID: \"43594ca2-fd6b-41b2-a8dc-e0152110fc05\") " Dec 05 02:25:55 crc kubenswrapper[4665]: I1205 02:25:55.904663 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/43594ca2-fd6b-41b2-a8dc-e0152110fc05-host" (OuterVolumeSpecName: "host") pod "43594ca2-fd6b-41b2-a8dc-e0152110fc05" (UID: "43594ca2-fd6b-41b2-a8dc-e0152110fc05"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 02:25:55 crc kubenswrapper[4665]: I1205 02:25:55.919723 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43594ca2-fd6b-41b2-a8dc-e0152110fc05-kube-api-access-wk2d5" (OuterVolumeSpecName: "kube-api-access-wk2d5") pod "43594ca2-fd6b-41b2-a8dc-e0152110fc05" (UID: "43594ca2-fd6b-41b2-a8dc-e0152110fc05"). InnerVolumeSpecName "kube-api-access-wk2d5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:25:56 crc kubenswrapper[4665]: I1205 02:25:56.006360 4665 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/43594ca2-fd6b-41b2-a8dc-e0152110fc05-host\") on node \"crc\" DevicePath \"\"" Dec 05 02:25:56 crc kubenswrapper[4665]: I1205 02:25:56.006583 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wk2d5\" (UniqueName: \"kubernetes.io/projected/43594ca2-fd6b-41b2-a8dc-e0152110fc05-kube-api-access-wk2d5\") on node \"crc\" DevicePath \"\"" Dec 05 02:25:56 crc kubenswrapper[4665]: I1205 02:25:56.021595 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-s297g/crc-debug-sjj2l"] Dec 05 02:25:56 crc kubenswrapper[4665]: I1205 02:25:56.029814 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-s297g/crc-debug-sjj2l"] Dec 05 02:25:56 crc kubenswrapper[4665]: I1205 02:25:56.631073 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="67a659678728bd247d127a803cfe69622314968a9f1ef7eeeda3dcf86380e77c" Dec 05 02:25:56 crc kubenswrapper[4665]: I1205 02:25:56.631186 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-s297g/crc-debug-sjj2l" Dec 05 02:25:56 crc kubenswrapper[4665]: I1205 02:25:56.905444 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43594ca2-fd6b-41b2-a8dc-e0152110fc05" path="/var/lib/kubelet/pods/43594ca2-fd6b-41b2-a8dc-e0152110fc05/volumes" Dec 05 02:25:57 crc kubenswrapper[4665]: I1205 02:25:57.257899 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-s297g/crc-debug-cgrx7"] Dec 05 02:25:57 crc kubenswrapper[4665]: E1205 02:25:57.258744 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="43594ca2-fd6b-41b2-a8dc-e0152110fc05" containerName="container-00" Dec 05 02:25:57 crc kubenswrapper[4665]: I1205 02:25:57.258866 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="43594ca2-fd6b-41b2-a8dc-e0152110fc05" containerName="container-00" Dec 05 02:25:57 crc kubenswrapper[4665]: I1205 02:25:57.259195 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="43594ca2-fd6b-41b2-a8dc-e0152110fc05" containerName="container-00" Dec 05 02:25:57 crc kubenswrapper[4665]: I1205 02:25:57.260086 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-s297g/crc-debug-cgrx7" Dec 05 02:25:57 crc kubenswrapper[4665]: I1205 02:25:57.429251 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hz92z\" (UniqueName: \"kubernetes.io/projected/7637e161-9969-46a7-b0b0-97fd7d14a225-kube-api-access-hz92z\") pod \"crc-debug-cgrx7\" (UID: \"7637e161-9969-46a7-b0b0-97fd7d14a225\") " pod="openshift-must-gather-s297g/crc-debug-cgrx7" Dec 05 02:25:57 crc kubenswrapper[4665]: I1205 02:25:57.429351 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7637e161-9969-46a7-b0b0-97fd7d14a225-host\") pod \"crc-debug-cgrx7\" (UID: \"7637e161-9969-46a7-b0b0-97fd7d14a225\") " pod="openshift-must-gather-s297g/crc-debug-cgrx7" Dec 05 02:25:57 crc kubenswrapper[4665]: I1205 02:25:57.531254 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hz92z\" (UniqueName: \"kubernetes.io/projected/7637e161-9969-46a7-b0b0-97fd7d14a225-kube-api-access-hz92z\") pod \"crc-debug-cgrx7\" (UID: \"7637e161-9969-46a7-b0b0-97fd7d14a225\") " pod="openshift-must-gather-s297g/crc-debug-cgrx7" Dec 05 02:25:57 crc kubenswrapper[4665]: I1205 02:25:57.531945 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7637e161-9969-46a7-b0b0-97fd7d14a225-host\") pod \"crc-debug-cgrx7\" (UID: \"7637e161-9969-46a7-b0b0-97fd7d14a225\") " pod="openshift-must-gather-s297g/crc-debug-cgrx7" Dec 05 02:25:57 crc kubenswrapper[4665]: I1205 02:25:57.532070 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7637e161-9969-46a7-b0b0-97fd7d14a225-host\") pod \"crc-debug-cgrx7\" (UID: \"7637e161-9969-46a7-b0b0-97fd7d14a225\") " pod="openshift-must-gather-s297g/crc-debug-cgrx7" Dec 05 02:25:57 crc kubenswrapper[4665]: I1205 02:25:57.550939 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hz92z\" (UniqueName: \"kubernetes.io/projected/7637e161-9969-46a7-b0b0-97fd7d14a225-kube-api-access-hz92z\") pod \"crc-debug-cgrx7\" (UID: \"7637e161-9969-46a7-b0b0-97fd7d14a225\") " pod="openshift-must-gather-s297g/crc-debug-cgrx7" Dec 05 02:25:57 crc kubenswrapper[4665]: I1205 02:25:57.576684 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-s297g/crc-debug-cgrx7" Dec 05 02:25:57 crc kubenswrapper[4665]: W1205 02:25:57.621785 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7637e161_9969_46a7_b0b0_97fd7d14a225.slice/crio-47703fd8cc059e409329c968651208eb00d9da9cdc9f25e775730857c8ff629a WatchSource:0}: Error finding container 47703fd8cc059e409329c968651208eb00d9da9cdc9f25e775730857c8ff629a: Status 404 returned error can't find the container with id 47703fd8cc059e409329c968651208eb00d9da9cdc9f25e775730857c8ff629a Dec 05 02:25:57 crc kubenswrapper[4665]: I1205 02:25:57.645977 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-s297g/crc-debug-cgrx7" event={"ID":"7637e161-9969-46a7-b0b0-97fd7d14a225","Type":"ContainerStarted","Data":"47703fd8cc059e409329c968651208eb00d9da9cdc9f25e775730857c8ff629a"} Dec 05 02:25:58 crc kubenswrapper[4665]: I1205 02:25:58.656777 4665 generic.go:334] "Generic (PLEG): container finished" podID="7637e161-9969-46a7-b0b0-97fd7d14a225" containerID="e10b23916a5758508cf60621c1b881bc04a32ef199abd815991301d128289909" exitCode=0 Dec 05 02:25:58 crc kubenswrapper[4665]: I1205 02:25:58.656851 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-s297g/crc-debug-cgrx7" event={"ID":"7637e161-9969-46a7-b0b0-97fd7d14a225","Type":"ContainerDied","Data":"e10b23916a5758508cf60621c1b881bc04a32ef199abd815991301d128289909"} Dec 05 02:25:58 crc kubenswrapper[4665]: I1205 02:25:58.698906 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-s297g/crc-debug-cgrx7"] Dec 05 02:25:58 crc kubenswrapper[4665]: I1205 02:25:58.706119 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-s297g/crc-debug-cgrx7"] Dec 05 02:25:59 crc kubenswrapper[4665]: I1205 02:25:59.765685 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-s297g/crc-debug-cgrx7" Dec 05 02:25:59 crc kubenswrapper[4665]: I1205 02:25:59.872997 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hz92z\" (UniqueName: \"kubernetes.io/projected/7637e161-9969-46a7-b0b0-97fd7d14a225-kube-api-access-hz92z\") pod \"7637e161-9969-46a7-b0b0-97fd7d14a225\" (UID: \"7637e161-9969-46a7-b0b0-97fd7d14a225\") " Dec 05 02:25:59 crc kubenswrapper[4665]: I1205 02:25:59.873306 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7637e161-9969-46a7-b0b0-97fd7d14a225-host\") pod \"7637e161-9969-46a7-b0b0-97fd7d14a225\" (UID: \"7637e161-9969-46a7-b0b0-97fd7d14a225\") " Dec 05 02:25:59 crc kubenswrapper[4665]: I1205 02:25:59.873594 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/7637e161-9969-46a7-b0b0-97fd7d14a225-host" (OuterVolumeSpecName: "host") pod "7637e161-9969-46a7-b0b0-97fd7d14a225" (UID: "7637e161-9969-46a7-b0b0-97fd7d14a225"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 02:25:59 crc kubenswrapper[4665]: I1205 02:25:59.873800 4665 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/7637e161-9969-46a7-b0b0-97fd7d14a225-host\") on node \"crc\" DevicePath \"\"" Dec 05 02:25:59 crc kubenswrapper[4665]: I1205 02:25:59.892520 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7637e161-9969-46a7-b0b0-97fd7d14a225-kube-api-access-hz92z" (OuterVolumeSpecName: "kube-api-access-hz92z") pod "7637e161-9969-46a7-b0b0-97fd7d14a225" (UID: "7637e161-9969-46a7-b0b0-97fd7d14a225"). InnerVolumeSpecName "kube-api-access-hz92z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:25:59 crc kubenswrapper[4665]: I1205 02:25:59.974968 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hz92z\" (UniqueName: \"kubernetes.io/projected/7637e161-9969-46a7-b0b0-97fd7d14a225-kube-api-access-hz92z\") on node \"crc\" DevicePath \"\"" Dec 05 02:26:00 crc kubenswrapper[4665]: I1205 02:26:00.674591 4665 scope.go:117] "RemoveContainer" containerID="e10b23916a5758508cf60621c1b881bc04a32ef199abd815991301d128289909" Dec 05 02:26:00 crc kubenswrapper[4665]: I1205 02:26:00.674644 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-s297g/crc-debug-cgrx7" Dec 05 02:26:00 crc kubenswrapper[4665]: I1205 02:26:00.902255 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7637e161-9969-46a7-b0b0-97fd7d14a225" path="/var/lib/kubelet/pods/7637e161-9969-46a7-b0b0-97fd7d14a225/volumes" Dec 05 02:26:14 crc kubenswrapper[4665]: I1205 02:26:14.922861 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 02:26:14 crc kubenswrapper[4665]: I1205 02:26:14.924238 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 02:26:14 crc kubenswrapper[4665]: I1205 02:26:14.924458 4665 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 02:26:15 crc kubenswrapper[4665]: I1205 02:26:15.796012 4665 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06"} pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 02:26:15 crc kubenswrapper[4665]: I1205 02:26:15.796169 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" containerID="cri-o://2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" gracePeriod=600 Dec 05 02:26:15 crc kubenswrapper[4665]: E1205 02:26:15.940771 4665 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:26:16 crc kubenswrapper[4665]: I1205 02:26:16.808659 4665 generic.go:334] "Generic (PLEG): container finished" podID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" exitCode=0 Dec 05 02:26:16 crc kubenswrapper[4665]: I1205 02:26:16.808698 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerDied","Data":"2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06"} Dec 05 02:26:16 crc kubenswrapper[4665]: I1205 02:26:16.808728 4665 scope.go:117] "RemoveContainer" containerID="9a7f31c5be7dcf08fe97e861724eaf43068f45a09a1ef09ab8e767cd3a279980" Dec 05 02:26:16 crc kubenswrapper[4665]: I1205 02:26:16.809315 4665 scope.go:117] "RemoveContainer" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" Dec 05 02:26:16 crc kubenswrapper[4665]: E1205 02:26:16.809733 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:26:20 crc kubenswrapper[4665]: I1205 02:26:20.363496 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6f55bbcd96-6gf9l_db4c2bcc-14d3-4129-89d9-e25d6c01ef02/barbican-api/0.log" Dec 05 02:26:20 crc kubenswrapper[4665]: I1205 02:26:20.549502 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6f55bbcd96-6gf9l_db4c2bcc-14d3-4129-89d9-e25d6c01ef02/barbican-api-log/0.log" Dec 05 02:26:20 crc kubenswrapper[4665]: I1205 02:26:20.648183 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-d6596b4bb-7zqjr_6cf44f27-5007-4db9-8784-715bdef486a0/barbican-keystone-listener/0.log" Dec 05 02:26:20 crc kubenswrapper[4665]: I1205 02:26:20.741021 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-d6596b4bb-7zqjr_6cf44f27-5007-4db9-8784-715bdef486a0/barbican-keystone-listener-log/0.log" Dec 05 02:26:20 crc kubenswrapper[4665]: I1205 02:26:20.916178 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5bf7b484d7-782wq_9dd95469-e581-46aa-bbb2-c69214aa26c7/barbican-worker/0.log" Dec 05 02:26:20 crc kubenswrapper[4665]: I1205 02:26:20.975609 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5bf7b484d7-782wq_9dd95469-e581-46aa-bbb2-c69214aa26c7/barbican-worker-log/0.log" Dec 05 02:26:21 crc kubenswrapper[4665]: I1205 02:26:21.041624 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg_dd675614-41e7-40e1-b09d-639e6ed250fb/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 
02:26:21 crc kubenswrapper[4665]: I1205 02:26:21.203692 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_94d0e25f-bdca-4da9-80c5-b81bedbdd7cc/ceilometer-notification-agent/0.log" Dec 05 02:26:21 crc kubenswrapper[4665]: I1205 02:26:21.277860 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_94d0e25f-bdca-4da9-80c5-b81bedbdd7cc/ceilometer-central-agent/0.log" Dec 05 02:26:21 crc kubenswrapper[4665]: I1205 02:26:21.323518 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_94d0e25f-bdca-4da9-80c5-b81bedbdd7cc/proxy-httpd/0.log" Dec 05 02:26:21 crc kubenswrapper[4665]: I1205 02:26:21.404171 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_94d0e25f-bdca-4da9-80c5-b81bedbdd7cc/sg-core/0.log" Dec 05 02:26:21 crc kubenswrapper[4665]: I1205 02:26:21.535601 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_84d298f5-1785-45d9-8195-ae1ba82c398a/cinder-api/0.log" Dec 05 02:26:21 crc kubenswrapper[4665]: I1205 02:26:21.586979 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_84d298f5-1785-45d9-8195-ae1ba82c398a/cinder-api-log/0.log" Dec 05 02:26:21 crc kubenswrapper[4665]: I1205 02:26:21.756371 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_5d1a4169-4d66-47db-a16d-c3f77df4334a/cinder-scheduler/0.log" Dec 05 02:26:21 crc kubenswrapper[4665]: I1205 02:26:21.809628 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_5d1a4169-4d66-47db-a16d-c3f77df4334a/probe/0.log" Dec 05 02:26:21 crc kubenswrapper[4665]: I1205 02:26:21.951601 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr_16df9956-9395-4412-92c6-9635bf23c681/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:26:22 crc kubenswrapper[4665]: I1205 02:26:22.088088 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8_f6b83f4a-6910-44b3-9fca-b9b455cc3d97/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:26:22 crc kubenswrapper[4665]: I1205 02:26:22.181933 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-54ffdb7d8c-2jtfc_ce335393-a026-4267-b337-ca077b2461b8/init/0.log" Dec 05 02:26:22 crc kubenswrapper[4665]: I1205 02:26:22.488593 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-54ffdb7d8c-2jtfc_ce335393-a026-4267-b337-ca077b2461b8/init/0.log" Dec 05 02:26:22 crc kubenswrapper[4665]: I1205 02:26:22.514253 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-54ffdb7d8c-2jtfc_ce335393-a026-4267-b337-ca077b2461b8/dnsmasq-dns/0.log" Dec 05 02:26:22 crc kubenswrapper[4665]: I1205 02:26:22.568133 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-7mx69_a185d71d-c81e-4faf-8c7a-c31c2ee82f31/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:26:22 crc kubenswrapper[4665]: I1205 02:26:22.727132 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9/glance-log/0.log" Dec 05 02:26:22 crc kubenswrapper[4665]: I1205 02:26:22.762898 4665 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_glance-default-external-api-0_75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9/glance-httpd/0.log" Dec 05 02:26:23 crc kubenswrapper[4665]: I1205 02:26:23.006075 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_37c48d2d-df08-4684-8215-be918d17cdae/glance-log/0.log" Dec 05 02:26:23 crc kubenswrapper[4665]: I1205 02:26:23.040393 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_37c48d2d-df08-4684-8215-be918d17cdae/glance-httpd/0.log" Dec 05 02:26:23 crc kubenswrapper[4665]: I1205 02:26:23.208582 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-644f785f4-mslbg_64407a72-3fdf-450f-b5c0-913ee74bb437/horizon/1.log" Dec 05 02:26:23 crc kubenswrapper[4665]: I1205 02:26:23.296684 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-644f785f4-mslbg_64407a72-3fdf-450f-b5c0-913ee74bb437/horizon/0.log" Dec 05 02:26:23 crc kubenswrapper[4665]: I1205 02:26:23.595211 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd_caadc290-7caf-4c1f-8a2e-4c2b275e572b/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:26:23 crc kubenswrapper[4665]: I1205 02:26:23.665833 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-644f785f4-mslbg_64407a72-3fdf-450f-b5c0-913ee74bb437/horizon-log/0.log" Dec 05 02:26:24 crc kubenswrapper[4665]: I1205 02:26:24.318924 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-5jtmp_3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:26:24 crc kubenswrapper[4665]: I1205 02:26:24.616859 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29415001-s26ld_01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa/keystone-cron/0.log" Dec 05 02:26:24 crc kubenswrapper[4665]: I1205 02:26:24.799406 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-79f86c7bd7-c4mss_4d1f4f51-e293-418f-a305-a7699a6cb866/keystone-api/0.log" Dec 05 02:26:24 crc kubenswrapper[4665]: I1205 02:26:24.851865 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c/kube-state-metrics/0.log" Dec 05 02:26:25 crc kubenswrapper[4665]: I1205 02:26:25.023280 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd_386e08fe-2108-4139-af9d-94fbaa7b7b12/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:26:25 crc kubenswrapper[4665]: I1205 02:26:25.769720 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl_76df2ad8-3e10-41ac-aa00-bea04feee0b9/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:26:25 crc kubenswrapper[4665]: I1205 02:26:25.961400 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5b484f7985-8qkjq_c6a6180f-d384-4015-8bf3-6563123c2f6a/neutron-httpd/0.log" Dec 05 02:26:26 crc kubenswrapper[4665]: I1205 02:26:26.164185 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5b484f7985-8qkjq_c6a6180f-d384-4015-8bf3-6563123c2f6a/neutron-api/0.log" Dec 05 02:26:26 crc kubenswrapper[4665]: I1205 02:26:26.704589 4665 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-cell0-conductor-0_fe074ce6-2f02-47f2-9e94-5a910517f64d/nova-cell0-conductor-conductor/0.log" Dec 05 02:26:26 crc kubenswrapper[4665]: I1205 02:26:26.988918 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_632d0371-eca3-499e-8993-07e8025175d3/nova-cell1-conductor-conductor/0.log" Dec 05 02:26:27 crc kubenswrapper[4665]: I1205 02:26:27.316717 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_a32e8029-fd7e-4662-8fbe-b83deaea60c8/nova-cell1-novncproxy-novncproxy/0.log" Dec 05 02:26:27 crc kubenswrapper[4665]: I1205 02:26:27.516798 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_661570e0-6f0d-4fd7-8368-b4713af3da59/nova-api-log/0.log" Dec 05 02:26:27 crc kubenswrapper[4665]: I1205 02:26:27.577861 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-85px8_31efb8b4-c179-4d28-b197-2803bef0c22e/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:26:27 crc kubenswrapper[4665]: I1205 02:26:27.678656 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_661570e0-6f0d-4fd7-8368-b4713af3da59/nova-api-api/0.log" Dec 05 02:26:27 crc kubenswrapper[4665]: I1205 02:26:27.823110 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_40f37057-0a9e-40c6-9b67-776cd9b19e54/nova-metadata-log/0.log" Dec 05 02:26:28 crc kubenswrapper[4665]: I1205 02:26:28.267165 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6/mysql-bootstrap/0.log" Dec 05 02:26:28 crc kubenswrapper[4665]: I1205 02:26:28.446423 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_165ee87a-9845-4f0c-b291-9d5fe6a1bdc5/nova-scheduler-scheduler/0.log" Dec 05 02:26:28 crc kubenswrapper[4665]: I1205 02:26:28.453358 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6/mysql-bootstrap/0.log" Dec 05 02:26:28 crc kubenswrapper[4665]: I1205 02:26:28.504447 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6/galera/0.log" Dec 05 02:26:28 crc kubenswrapper[4665]: I1205 02:26:28.701172 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_423b314f-ea29-4705-8447-3a316edd8c6b/mysql-bootstrap/0.log" Dec 05 02:26:28 crc kubenswrapper[4665]: I1205 02:26:28.985472 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_423b314f-ea29-4705-8447-3a316edd8c6b/mysql-bootstrap/0.log" Dec 05 02:26:28 crc kubenswrapper[4665]: I1205 02:26:28.998885 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_423b314f-ea29-4705-8447-3a316edd8c6b/galera/0.log" Dec 05 02:26:29 crc kubenswrapper[4665]: I1205 02:26:29.167492 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_a8461ae3-f75f-42de-b320-c9dc4b1545ec/openstackclient/0.log" Dec 05 02:26:29 crc kubenswrapper[4665]: I1205 02:26:29.323945 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-ts2h8_e65a93ed-16e2-4cf7-a295-a3517e553335/openstack-network-exporter/0.log" Dec 05 02:26:29 crc kubenswrapper[4665]: I1205 02:26:29.455587 4665 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_nova-metadata-0_40f37057-0a9e-40c6-9b67-776cd9b19e54/nova-metadata-metadata/0.log" Dec 05 02:26:29 crc kubenswrapper[4665]: I1205 02:26:29.605453 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7xcgj_ca9ae867-4d7f-4a30-ab90-ba5113fb9029/ovsdb-server-init/0.log" Dec 05 02:26:29 crc kubenswrapper[4665]: I1205 02:26:29.850895 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7xcgj_ca9ae867-4d7f-4a30-ab90-ba5113fb9029/ovs-vswitchd/0.log" Dec 05 02:26:29 crc kubenswrapper[4665]: I1205 02:26:29.893049 4665 scope.go:117] "RemoveContainer" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" Dec 05 02:26:29 crc kubenswrapper[4665]: E1205 02:26:29.893392 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:26:29 crc kubenswrapper[4665]: I1205 02:26:29.939148 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7xcgj_ca9ae867-4d7f-4a30-ab90-ba5113fb9029/ovsdb-server-init/0.log" Dec 05 02:26:29 crc kubenswrapper[4665]: I1205 02:26:29.977561 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7xcgj_ca9ae867-4d7f-4a30-ab90-ba5113fb9029/ovsdb-server/0.log" Dec 05 02:26:30 crc kubenswrapper[4665]: I1205 02:26:30.156104 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-zr2m2_41de0d44-b33f-43c4-a6c1-54830596874b/ovn-controller/0.log" Dec 05 02:26:30 crc kubenswrapper[4665]: I1205 02:26:30.276268 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-dxlht_21b83a7a-3186-47f2-851a-b65efe2348a8/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:26:30 crc kubenswrapper[4665]: I1205 02:26:30.467932 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_acb7355b-b799-4b12-a8f1-75dd2439696f/ovn-northd/0.log" Dec 05 02:26:30 crc kubenswrapper[4665]: I1205 02:26:30.531569 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_acb7355b-b799-4b12-a8f1-75dd2439696f/openstack-network-exporter/0.log" Dec 05 02:26:30 crc kubenswrapper[4665]: I1205 02:26:30.668004 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_df5300cc-4ce3-4574-a775-595607aeddb6/openstack-network-exporter/0.log" Dec 05 02:26:30 crc kubenswrapper[4665]: I1205 02:26:30.708589 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_df5300cc-4ce3-4574-a775-595607aeddb6/ovsdbserver-nb/0.log" Dec 05 02:26:30 crc kubenswrapper[4665]: I1205 02:26:30.887495 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_68f1be10-e014-492c-9fb2-f6131ee209d4/ovsdbserver-sb/0.log" Dec 05 02:26:30 crc kubenswrapper[4665]: I1205 02:26:30.945108 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_68f1be10-e014-492c-9fb2-f6131ee209d4/openstack-network-exporter/0.log" Dec 05 02:26:31 crc kubenswrapper[4665]: I1205 02:26:31.355263 4665 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_placement-7c79d9c44-5ps46_52283875-2314-426a-b5ff-77a8b000f4cc/placement-api/0.log" Dec 05 02:26:31 crc kubenswrapper[4665]: I1205 02:26:31.376092 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b5dc1946-d77e-4106-9350-326f32a2ae55/setup-container/0.log" Dec 05 02:26:31 crc kubenswrapper[4665]: I1205 02:26:31.408919 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7c79d9c44-5ps46_52283875-2314-426a-b5ff-77a8b000f4cc/placement-log/0.log" Dec 05 02:26:31 crc kubenswrapper[4665]: I1205 02:26:31.617004 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b5dc1946-d77e-4106-9350-326f32a2ae55/rabbitmq/0.log" Dec 05 02:26:31 crc kubenswrapper[4665]: I1205 02:26:31.624609 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b5dc1946-d77e-4106-9350-326f32a2ae55/setup-container/0.log" Dec 05 02:26:31 crc kubenswrapper[4665]: I1205 02:26:31.753547 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7a8135c5-ab50-4a2b-895a-7976da8b5bee/setup-container/0.log" Dec 05 02:26:31 crc kubenswrapper[4665]: I1205 02:26:31.957644 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7a8135c5-ab50-4a2b-895a-7976da8b5bee/setup-container/0.log" Dec 05 02:26:32 crc kubenswrapper[4665]: I1205 02:26:32.014514 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7a8135c5-ab50-4a2b-895a-7976da8b5bee/rabbitmq/0.log" Dec 05 02:26:32 crc kubenswrapper[4665]: I1205 02:26:32.189264 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77_e27c2d20-292e-4f38-8fb9-8addf5cb5ebf/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:26:32 crc kubenswrapper[4665]: I1205 02:26:32.382396 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-4wjpm_61298cb7-8b67-4f94-bd96-ee4ec8189d00/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:26:32 crc kubenswrapper[4665]: I1205 02:26:32.441832 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2_f3777f3d-f5e6-479d-947b-baf234749487/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:26:33 crc kubenswrapper[4665]: I1205 02:26:33.067588 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-nx4nq_0e47d679-dd01-454e-8ae4-887d6d676d15/ssh-known-hosts-edpm-deployment/0.log" Dec 05 02:26:33 crc kubenswrapper[4665]: I1205 02:26:33.134864 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-8fk25_66505ea7-937f-4f07-b036-afca1adc368c/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:26:33 crc kubenswrapper[4665]: I1205 02:26:33.482406 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5f4f69cd4c-jp87r_5cf42108-5fbb-4bb1-b941-b2181f99fa5c/proxy-server/0.log" Dec 05 02:26:33 crc kubenswrapper[4665]: I1205 02:26:33.521901 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5f4f69cd4c-jp87r_5cf42108-5fbb-4bb1-b941-b2181f99fa5c/proxy-httpd/0.log" Dec 05 02:26:33 crc kubenswrapper[4665]: I1205 02:26:33.697018 4665 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-zlrjx_796fddd5-4127-4632-8728-406e29348c74/swift-ring-rebalance/0.log" Dec 05 02:26:33 crc kubenswrapper[4665]: I1205 02:26:33.745412 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/account-reaper/0.log" Dec 05 02:26:33 crc kubenswrapper[4665]: I1205 02:26:33.836604 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/account-auditor/0.log" Dec 05 02:26:34 crc kubenswrapper[4665]: I1205 02:26:34.005198 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/account-replicator/0.log" Dec 05 02:26:34 crc kubenswrapper[4665]: I1205 02:26:34.009537 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/account-server/0.log" Dec 05 02:26:34 crc kubenswrapper[4665]: I1205 02:26:34.068948 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/container-auditor/0.log" Dec 05 02:26:34 crc kubenswrapper[4665]: I1205 02:26:34.154336 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/container-replicator/0.log" Dec 05 02:26:34 crc kubenswrapper[4665]: I1205 02:26:34.502495 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/object-expirer/0.log" Dec 05 02:26:34 crc kubenswrapper[4665]: I1205 02:26:34.533281 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/container-updater/0.log" Dec 05 02:26:34 crc kubenswrapper[4665]: I1205 02:26:34.543418 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/container-server/0.log" Dec 05 02:26:34 crc kubenswrapper[4665]: I1205 02:26:34.592448 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/object-auditor/0.log" Dec 05 02:26:34 crc kubenswrapper[4665]: I1205 02:26:34.784154 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/object-replicator/0.log" Dec 05 02:26:34 crc kubenswrapper[4665]: I1205 02:26:34.830859 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/rsync/0.log" Dec 05 02:26:34 crc kubenswrapper[4665]: I1205 02:26:34.836652 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/object-updater/0.log" Dec 05 02:26:34 crc kubenswrapper[4665]: I1205 02:26:34.862313 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/object-server/0.log" Dec 05 02:26:35 crc kubenswrapper[4665]: I1205 02:26:35.042605 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/swift-recon-cron/0.log" Dec 05 02:26:35 crc kubenswrapper[4665]: I1205 02:26:35.228451 4665 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-hrk76_820a4267-c307-42cb-96cb-482a2919cfe7/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:26:35 crc kubenswrapper[4665]: I1205 02:26:35.316904 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_a3dcea46-0cf1-441d-84ba-0b327c396844/tempest-tests-tempest-tests-runner/0.log" Dec 05 02:26:35 crc kubenswrapper[4665]: I1205 02:26:35.485885 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_cfdb58d0-875d-49d7-82ef-13bc4785a25c/test-operator-logs-container/0.log" Dec 05 02:26:35 crc kubenswrapper[4665]: I1205 02:26:35.619324 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6_61d15914-07c6-4782-b8e2-96ec816206fb/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:26:44 crc kubenswrapper[4665]: I1205 02:26:44.899714 4665 scope.go:117] "RemoveContainer" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" Dec 05 02:26:44 crc kubenswrapper[4665]: E1205 02:26:44.900746 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:26:45 crc kubenswrapper[4665]: I1205 02:26:45.135245 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_02847117-544d-400b-b9a0-4d10200e0f0d/memcached/0.log" Dec 05 02:26:58 crc kubenswrapper[4665]: I1205 02:26:58.896475 4665 scope.go:117] "RemoveContainer" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" Dec 05 02:26:58 crc kubenswrapper[4665]: E1205 02:26:58.897599 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:27:04 crc kubenswrapper[4665]: I1205 02:27:04.679908 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb_3fc63545-1530-48cd-a790-f36d5cd5d73c/util/0.log" Dec 05 02:27:04 crc kubenswrapper[4665]: I1205 02:27:04.910168 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb_3fc63545-1530-48cd-a790-f36d5cd5d73c/util/0.log" Dec 05 02:27:04 crc kubenswrapper[4665]: I1205 02:27:04.946190 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb_3fc63545-1530-48cd-a790-f36d5cd5d73c/pull/0.log" Dec 05 02:27:04 crc kubenswrapper[4665]: I1205 02:27:04.960048 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb_3fc63545-1530-48cd-a790-f36d5cd5d73c/pull/0.log" 
Dec 05 02:27:05 crc kubenswrapper[4665]: I1205 02:27:05.149675 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb_3fc63545-1530-48cd-a790-f36d5cd5d73c/pull/0.log" Dec 05 02:27:05 crc kubenswrapper[4665]: I1205 02:27:05.205492 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb_3fc63545-1530-48cd-a790-f36d5cd5d73c/util/0.log" Dec 05 02:27:05 crc kubenswrapper[4665]: I1205 02:27:05.251963 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb_3fc63545-1530-48cd-a790-f36d5cd5d73c/extract/0.log" Dec 05 02:27:05 crc kubenswrapper[4665]: I1205 02:27:05.334514 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-8hblt_fa7a4766-c028-4114-b979-a7900e21103c/kube-rbac-proxy/0.log" Dec 05 02:27:05 crc kubenswrapper[4665]: I1205 02:27:05.469325 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-8hblt_fa7a4766-c028-4114-b979-a7900e21103c/manager/0.log" Dec 05 02:27:05 crc kubenswrapper[4665]: I1205 02:27:05.487757 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-ptwdk_669a406d-6e51-4ead-89ff-4a1df7cb7308/kube-rbac-proxy/0.log" Dec 05 02:27:05 crc kubenswrapper[4665]: I1205 02:27:05.622133 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-ptwdk_669a406d-6e51-4ead-89ff-4a1df7cb7308/manager/0.log" Dec 05 02:27:05 crc kubenswrapper[4665]: I1205 02:27:05.682317 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-6bsqf_c8e9557a-3433-413b-a5b2-9137f2b9c584/kube-rbac-proxy/0.log" Dec 05 02:27:05 crc kubenswrapper[4665]: I1205 02:27:05.699906 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-6bsqf_c8e9557a-3433-413b-a5b2-9137f2b9c584/manager/0.log" Dec 05 02:27:06 crc kubenswrapper[4665]: I1205 02:27:06.296157 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-ks65z_277c6945-0cbb-4a0a-8e22-e990d76da759/kube-rbac-proxy/0.log" Dec 05 02:27:06 crc kubenswrapper[4665]: I1205 02:27:06.375545 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-ks65z_277c6945-0cbb-4a0a-8e22-e990d76da759/manager/0.log" Dec 05 02:27:06 crc kubenswrapper[4665]: I1205 02:27:06.520793 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-dz2g4_93b5a195-1f87-4eb3-8511-916e652b3913/kube-rbac-proxy/0.log" Dec 05 02:27:06 crc kubenswrapper[4665]: I1205 02:27:06.529225 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-dz2g4_93b5a195-1f87-4eb3-8511-916e652b3913/manager/0.log" Dec 05 02:27:06 crc kubenswrapper[4665]: I1205 02:27:06.578569 4665 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-q6zxf_8bd45d32-e93b-415b-a885-3926454418c9/kube-rbac-proxy/0.log" Dec 05 02:27:06 crc kubenswrapper[4665]: I1205 02:27:06.722692 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-q6zxf_8bd45d32-e93b-415b-a885-3926454418c9/manager/0.log" Dec 05 02:27:06 crc kubenswrapper[4665]: I1205 02:27:06.786928 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-lt2sv_68e17c62-dc97-4bf6-b9de-340e03d5cbda/kube-rbac-proxy/0.log" Dec 05 02:27:06 crc kubenswrapper[4665]: I1205 02:27:06.941967 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-9pbl9_3a85872e-618d-4847-aae0-1eb366f16003/kube-rbac-proxy/0.log" Dec 05 02:27:07 crc kubenswrapper[4665]: I1205 02:27:07.038003 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-lt2sv_68e17c62-dc97-4bf6-b9de-340e03d5cbda/manager/0.log" Dec 05 02:27:07 crc kubenswrapper[4665]: I1205 02:27:07.059036 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-9pbl9_3a85872e-618d-4847-aae0-1eb366f16003/manager/0.log" Dec 05 02:27:07 crc kubenswrapper[4665]: I1205 02:27:07.201652 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-nfmtb_5c1d9b52-7fcd-4615-9faa-af55e4165ffb/kube-rbac-proxy/0.log" Dec 05 02:27:07 crc kubenswrapper[4665]: I1205 02:27:07.313515 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-nfmtb_5c1d9b52-7fcd-4615-9faa-af55e4165ffb/manager/0.log" Dec 05 02:27:07 crc kubenswrapper[4665]: I1205 02:27:07.435191 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-dxk89_1fd940e7-8adc-4859-a763-58d909016fd6/manager/0.log" Dec 05 02:27:07 crc kubenswrapper[4665]: I1205 02:27:07.482067 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-5qmj8_e7235695-da27-4f1d-afec-a6f2a3decc79/kube-rbac-proxy/0.log" Dec 05 02:27:07 crc kubenswrapper[4665]: I1205 02:27:07.496744 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-dxk89_1fd940e7-8adc-4859-a763-58d909016fd6/kube-rbac-proxy/0.log" Dec 05 02:27:07 crc kubenswrapper[4665]: I1205 02:27:07.682334 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-5qmj8_e7235695-da27-4f1d-afec-a6f2a3decc79/manager/0.log" Dec 05 02:27:07 crc kubenswrapper[4665]: I1205 02:27:07.759205 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-fbmtc_8d94649e-ea57-4b1a-9fb0-2b37b567cd77/kube-rbac-proxy/0.log" Dec 05 02:27:07 crc kubenswrapper[4665]: I1205 02:27:07.830804 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-fbmtc_8d94649e-ea57-4b1a-9fb0-2b37b567cd77/manager/0.log" Dec 05 02:27:07 crc kubenswrapper[4665]: I1205 02:27:07.967503 4665 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-fgjvz_fb8b497b-5207-408c-9e30-e7169c4ccede/kube-rbac-proxy/0.log" Dec 05 02:27:08 crc kubenswrapper[4665]: I1205 02:27:08.045262 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-fgjvz_fb8b497b-5207-408c-9e30-e7169c4ccede/manager/0.log" Dec 05 02:27:08 crc kubenswrapper[4665]: I1205 02:27:08.080494 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-dnqxs_764119ea-4228-4141-a7a7-faee0be8d052/kube-rbac-proxy/0.log" Dec 05 02:27:08 crc kubenswrapper[4665]: I1205 02:27:08.185685 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-dnqxs_764119ea-4228-4141-a7a7-faee0be8d052/manager/0.log" Dec 05 02:27:08 crc kubenswrapper[4665]: I1205 02:27:08.304766 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9_77dd89d3-29be-4d9c-ad35-a6281d6bd57f/kube-rbac-proxy/0.log" Dec 05 02:27:08 crc kubenswrapper[4665]: I1205 02:27:08.321981 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9_77dd89d3-29be-4d9c-ad35-a6281d6bd57f/manager/0.log" Dec 05 02:27:08 crc kubenswrapper[4665]: I1205 02:27:08.698845 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-pwd7r_68ae69df-a135-4ab5-b79c-47268f37c17f/registry-server/0.log" Dec 05 02:27:08 crc kubenswrapper[4665]: I1205 02:27:08.795120 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6f79d9dccc-zkrtn_119e3b58-5f63-441a-b2d9-9ea2e83df2b8/operator/0.log" Dec 05 02:27:08 crc kubenswrapper[4665]: I1205 02:27:08.997408 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-nmpk2_c874c72f-9ac0-4ce5-bf5c-fc9e983b725c/kube-rbac-proxy/0.log" Dec 05 02:27:09 crc kubenswrapper[4665]: I1205 02:27:09.032571 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-fstjd_8543ed45-b6f4-4f54-bc94-756bf6f031e6/kube-rbac-proxy/0.log" Dec 05 02:27:09 crc kubenswrapper[4665]: I1205 02:27:09.088625 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-nmpk2_c874c72f-9ac0-4ce5-bf5c-fc9e983b725c/manager/0.log" Dec 05 02:27:09 crc kubenswrapper[4665]: I1205 02:27:09.269018 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-fstjd_8543ed45-b6f4-4f54-bc94-756bf6f031e6/manager/0.log" Dec 05 02:27:09 crc kubenswrapper[4665]: I1205 02:27:09.448094 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-dmtqm_7bafbafa-4235-456c-a2b3-7990ad3f14e2/operator/0.log" Dec 05 02:27:09 crc kubenswrapper[4665]: I1205 02:27:09.617531 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-dlncm_3933c0fc-ca36-42a1-b418-9db281576617/kube-rbac-proxy/0.log" Dec 05 02:27:09 crc kubenswrapper[4665]: I1205 02:27:09.663125 
4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-dlncm_3933c0fc-ca36-42a1-b418-9db281576617/manager/0.log" Dec 05 02:27:09 crc kubenswrapper[4665]: I1205 02:27:09.678475 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-79966545b7-vd7h4_afada5e1-db62-40f7-b5a9-1c36f42670d4/manager/0.log" Dec 05 02:27:09 crc kubenswrapper[4665]: I1205 02:27:09.815667 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-6dh9j_f353bd63-e193-4a26-8ba6-32f1eec034a8/kube-rbac-proxy/0.log" Dec 05 02:27:09 crc kubenswrapper[4665]: I1205 02:27:09.903371 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-bk8hv_0c289bf9-999d-4396-b15b-b27fded35180/kube-rbac-proxy/0.log" Dec 05 02:27:09 crc kubenswrapper[4665]: I1205 02:27:09.945124 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-6dh9j_f353bd63-e193-4a26-8ba6-32f1eec034a8/manager/0.log" Dec 05 02:27:09 crc kubenswrapper[4665]: I1205 02:27:09.961551 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-bk8hv_0c289bf9-999d-4396-b15b-b27fded35180/manager/0.log" Dec 05 02:27:10 crc kubenswrapper[4665]: I1205 02:27:10.105159 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-sbnx4_5009fa32-5a01-46dc-9238-2f3c8ef7fddc/kube-rbac-proxy/0.log" Dec 05 02:27:10 crc kubenswrapper[4665]: I1205 02:27:10.160083 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-sbnx4_5009fa32-5a01-46dc-9238-2f3c8ef7fddc/manager/0.log" Dec 05 02:27:10 crc kubenswrapper[4665]: I1205 02:27:10.895060 4665 scope.go:117] "RemoveContainer" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" Dec 05 02:27:10 crc kubenswrapper[4665]: E1205 02:27:10.895687 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:27:23 crc kubenswrapper[4665]: I1205 02:27:23.893971 4665 scope.go:117] "RemoveContainer" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" Dec 05 02:27:23 crc kubenswrapper[4665]: E1205 02:27:23.894910 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:27:29 crc kubenswrapper[4665]: I1205 02:27:29.207365 4665 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-k5gsl_f20fec7f-c7bb-4bb5-b86c-076b8931aa97/control-plane-machine-set-operator/0.log" Dec 05 02:27:29 crc kubenswrapper[4665]: I1205 02:27:29.465001 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-42qzk_cfa776fb-8b8d-4cd1-941a-57e2672afdeb/kube-rbac-proxy/0.log" Dec 05 02:27:29 crc kubenswrapper[4665]: I1205 02:27:29.512102 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-42qzk_cfa776fb-8b8d-4cd1-941a-57e2672afdeb/machine-api-operator/0.log" Dec 05 02:27:36 crc kubenswrapper[4665]: I1205 02:27:36.894177 4665 scope.go:117] "RemoveContainer" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" Dec 05 02:27:36 crc kubenswrapper[4665]: E1205 02:27:36.896108 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:27:41 crc kubenswrapper[4665]: I1205 02:27:41.329403 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-hdlpm_5f586972-9546-4a85-a442-590c70b38de3/cert-manager-controller/0.log" Dec 05 02:27:41 crc kubenswrapper[4665]: I1205 02:27:41.572071 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-2czfg_7af985e3-148d-4974-b2ea-d9679063234a/cert-manager-webhook/0.log" Dec 05 02:27:41 crc kubenswrapper[4665]: I1205 02:27:41.626250 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-pv2mm_7b226a2a-8fb6-4694-a1a7-9a86e3d222e9/cert-manager-cainjector/0.log" Dec 05 02:27:51 crc kubenswrapper[4665]: I1205 02:27:51.893994 4665 scope.go:117] "RemoveContainer" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" Dec 05 02:27:51 crc kubenswrapper[4665]: E1205 02:27:51.894593 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:27:54 crc kubenswrapper[4665]: I1205 02:27:54.079200 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-dp69l_0c556144-28be-4719-91ae-78d016ea7d7a/nmstate-console-plugin/0.log" Dec 05 02:27:54 crc kubenswrapper[4665]: I1205 02:27:54.418578 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-glrwt_faa11fdb-cf1b-48a0-87b0-d40430440a73/nmstate-handler/0.log" Dec 05 02:27:54 crc kubenswrapper[4665]: I1205 02:27:54.424416 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-hgsvm_0e886560-4686-45f7-b50d-c0544fc53448/kube-rbac-proxy/0.log" Dec 05 02:27:54 crc kubenswrapper[4665]: I1205 02:27:54.586214 4665 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-hgsvm_0e886560-4686-45f7-b50d-c0544fc53448/nmstate-metrics/0.log" Dec 05 02:27:54 crc kubenswrapper[4665]: I1205 02:27:54.642524 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-7mmkk_1eb252f2-27b1-4cd9-be84-4183313f0710/nmstate-operator/0.log" Dec 05 02:27:54 crc kubenswrapper[4665]: I1205 02:27:54.775731 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-jgwh9_e3ec97f0-b128-4247-aa81-c51298bd148c/nmstate-webhook/0.log" Dec 05 02:28:04 crc kubenswrapper[4665]: I1205 02:28:04.899384 4665 scope.go:117] "RemoveContainer" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" Dec 05 02:28:04 crc kubenswrapper[4665]: E1205 02:28:04.900203 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:28:12 crc kubenswrapper[4665]: I1205 02:28:12.131247 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-dw2lj_b1652627-99c3-4670-84ec-c770bf76a4b4/kube-rbac-proxy/0.log" Dec 05 02:28:12 crc kubenswrapper[4665]: I1205 02:28:12.148269 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-dw2lj_b1652627-99c3-4670-84ec-c770bf76a4b4/controller/0.log" Dec 05 02:28:12 crc kubenswrapper[4665]: I1205 02:28:12.338394 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/cp-frr-files/0.log" Dec 05 02:28:12 crc kubenswrapper[4665]: I1205 02:28:12.545599 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/cp-frr-files/0.log" Dec 05 02:28:12 crc kubenswrapper[4665]: I1205 02:28:12.587100 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/cp-reloader/0.log" Dec 05 02:28:12 crc kubenswrapper[4665]: I1205 02:28:12.595565 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/cp-metrics/0.log" Dec 05 02:28:12 crc kubenswrapper[4665]: I1205 02:28:12.643244 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/cp-reloader/0.log" Dec 05 02:28:12 crc kubenswrapper[4665]: I1205 02:28:12.901689 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/cp-metrics/0.log" Dec 05 02:28:12 crc kubenswrapper[4665]: I1205 02:28:12.902073 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/cp-reloader/0.log" Dec 05 02:28:12 crc kubenswrapper[4665]: I1205 02:28:12.910481 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/cp-metrics/0.log" Dec 05 02:28:12 crc kubenswrapper[4665]: I1205 02:28:12.918264 4665 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/cp-frr-files/0.log" Dec 05 02:28:13 crc kubenswrapper[4665]: I1205 02:28:13.571593 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/cp-frr-files/0.log" Dec 05 02:28:13 crc kubenswrapper[4665]: I1205 02:28:13.692113 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/cp-reloader/0.log" Dec 05 02:28:13 crc kubenswrapper[4665]: I1205 02:28:13.697707 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/cp-metrics/0.log" Dec 05 02:28:13 crc kubenswrapper[4665]: I1205 02:28:13.737983 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/controller/0.log" Dec 05 02:28:13 crc kubenswrapper[4665]: I1205 02:28:13.960842 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/kube-rbac-proxy/0.log" Dec 05 02:28:14 crc kubenswrapper[4665]: I1205 02:28:14.051483 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/frr-metrics/0.log" Dec 05 02:28:14 crc kubenswrapper[4665]: I1205 02:28:14.051653 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/kube-rbac-proxy-frr/0.log" Dec 05 02:28:14 crc kubenswrapper[4665]: I1205 02:28:14.262625 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/reloader/0.log" Dec 05 02:28:14 crc kubenswrapper[4665]: I1205 02:28:14.387210 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-pchdx_f444b2fb-024e-4f65-84cc-4bc16a3cc6a9/frr-k8s-webhook-server/0.log" Dec 05 02:28:14 crc kubenswrapper[4665]: I1205 02:28:14.678979 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-84549bb967-wwjmg_c3dedfa3-52da-4bbc-b080-ce01610f9152/manager/0.log" Dec 05 02:28:14 crc kubenswrapper[4665]: I1205 02:28:14.873384 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6868996d58-rgvnq_067086f8-f82e-45c4-a7dd-79cacf3192e5/webhook-server/0.log" Dec 05 02:28:14 crc kubenswrapper[4665]: I1205 02:28:14.934334 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/frr/0.log" Dec 05 02:28:15 crc kubenswrapper[4665]: I1205 02:28:15.015005 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-wxh7x_44aa572b-ef2b-4b3a-83ef-9a45cfd73067/kube-rbac-proxy/0.log" Dec 05 02:28:15 crc kubenswrapper[4665]: I1205 02:28:15.354639 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-wxh7x_44aa572b-ef2b-4b3a-83ef-9a45cfd73067/speaker/0.log" Dec 05 02:28:18 crc kubenswrapper[4665]: I1205 02:28:18.893709 4665 scope.go:117] "RemoveContainer" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" Dec 05 02:28:18 crc kubenswrapper[4665]: E1205 02:28:18.894156 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:28:29 crc kubenswrapper[4665]: I1205 02:28:29.643126 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn_3d279a3e-be32-4a4d-bf07-54cb8f3b2efa/util/0.log" Dec 05 02:28:29 crc kubenswrapper[4665]: I1205 02:28:29.921680 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn_3d279a3e-be32-4a4d-bf07-54cb8f3b2efa/pull/0.log" Dec 05 02:28:29 crc kubenswrapper[4665]: I1205 02:28:29.943473 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn_3d279a3e-be32-4a4d-bf07-54cb8f3b2efa/pull/0.log" Dec 05 02:28:29 crc kubenswrapper[4665]: I1205 02:28:29.951684 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn_3d279a3e-be32-4a4d-bf07-54cb8f3b2efa/util/0.log" Dec 05 02:28:30 crc kubenswrapper[4665]: I1205 02:28:30.163881 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn_3d279a3e-be32-4a4d-bf07-54cb8f3b2efa/extract/0.log" Dec 05 02:28:30 crc kubenswrapper[4665]: I1205 02:28:30.180007 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn_3d279a3e-be32-4a4d-bf07-54cb8f3b2efa/pull/0.log" Dec 05 02:28:30 crc kubenswrapper[4665]: I1205 02:28:30.187078 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn_3d279a3e-be32-4a4d-bf07-54cb8f3b2efa/util/0.log" Dec 05 02:28:30 crc kubenswrapper[4665]: I1205 02:28:30.563694 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g_3dd46a4d-4cea-4ef4-8b4a-e78d451055a3/util/0.log" Dec 05 02:28:31 crc kubenswrapper[4665]: I1205 02:28:31.139360 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g_3dd46a4d-4cea-4ef4-8b4a-e78d451055a3/pull/0.log" Dec 05 02:28:31 crc kubenswrapper[4665]: I1205 02:28:31.147445 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g_3dd46a4d-4cea-4ef4-8b4a-e78d451055a3/util/0.log" Dec 05 02:28:31 crc kubenswrapper[4665]: I1205 02:28:31.217012 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g_3dd46a4d-4cea-4ef4-8b4a-e78d451055a3/pull/0.log" Dec 05 02:28:31 crc kubenswrapper[4665]: I1205 02:28:31.381756 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g_3dd46a4d-4cea-4ef4-8b4a-e78d451055a3/util/0.log" Dec 05 02:28:31 crc kubenswrapper[4665]: I1205 02:28:31.454240 4665 log.go:25] "Finished parsing log 
file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g_3dd46a4d-4cea-4ef4-8b4a-e78d451055a3/extract/0.log" Dec 05 02:28:31 crc kubenswrapper[4665]: I1205 02:28:31.466769 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g_3dd46a4d-4cea-4ef4-8b4a-e78d451055a3/pull/0.log" Dec 05 02:28:31 crc kubenswrapper[4665]: I1205 02:28:31.667527 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pkwjd_0995d7c1-230b-46a5-9136-c644fa9faf86/extract-utilities/0.log" Dec 05 02:28:31 crc kubenswrapper[4665]: I1205 02:28:31.870857 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pkwjd_0995d7c1-230b-46a5-9136-c644fa9faf86/extract-content/0.log" Dec 05 02:28:31 crc kubenswrapper[4665]: I1205 02:28:31.870870 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pkwjd_0995d7c1-230b-46a5-9136-c644fa9faf86/extract-utilities/0.log" Dec 05 02:28:31 crc kubenswrapper[4665]: I1205 02:28:31.888817 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pkwjd_0995d7c1-230b-46a5-9136-c644fa9faf86/extract-content/0.log" Dec 05 02:28:31 crc kubenswrapper[4665]: I1205 02:28:31.926992 4665 scope.go:117] "RemoveContainer" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" Dec 05 02:28:31 crc kubenswrapper[4665]: E1205 02:28:31.927251 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:28:32 crc kubenswrapper[4665]: I1205 02:28:32.179382 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pkwjd_0995d7c1-230b-46a5-9136-c644fa9faf86/extract-utilities/0.log" Dec 05 02:28:32 crc kubenswrapper[4665]: I1205 02:28:32.234012 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pkwjd_0995d7c1-230b-46a5-9136-c644fa9faf86/extract-content/0.log" Dec 05 02:28:32 crc kubenswrapper[4665]: I1205 02:28:32.497632 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9dz8h_9182b0cf-85af-4df8-81d6-5f1f407631ac/extract-utilities/0.log" Dec 05 02:28:32 crc kubenswrapper[4665]: I1205 02:28:32.795889 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9dz8h_9182b0cf-85af-4df8-81d6-5f1f407631ac/extract-content/0.log" Dec 05 02:28:32 crc kubenswrapper[4665]: I1205 02:28:32.835730 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9dz8h_9182b0cf-85af-4df8-81d6-5f1f407631ac/extract-utilities/0.log" Dec 05 02:28:32 crc kubenswrapper[4665]: I1205 02:28:32.845887 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pkwjd_0995d7c1-230b-46a5-9136-c644fa9faf86/registry-server/0.log" Dec 05 02:28:32 crc kubenswrapper[4665]: I1205 02:28:32.857375 4665 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openshift-marketplace_community-operators-9dz8h_9182b0cf-85af-4df8-81d6-5f1f407631ac/extract-content/0.log" Dec 05 02:28:33 crc kubenswrapper[4665]: I1205 02:28:33.150244 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9dz8h_9182b0cf-85af-4df8-81d6-5f1f407631ac/extract-content/0.log" Dec 05 02:28:33 crc kubenswrapper[4665]: I1205 02:28:33.157881 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9dz8h_9182b0cf-85af-4df8-81d6-5f1f407631ac/extract-utilities/0.log" Dec 05 02:28:33 crc kubenswrapper[4665]: I1205 02:28:33.410352 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-6lq6k_8b497962-196d-41aa-aacc-1d68536dfec6/marketplace-operator/0.log" Dec 05 02:28:33 crc kubenswrapper[4665]: I1205 02:28:33.684341 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ldv79_7f4973ad-12ad-421c-b68f-9b47206f2e2e/extract-utilities/0.log" Dec 05 02:28:33 crc kubenswrapper[4665]: I1205 02:28:33.715110 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9dz8h_9182b0cf-85af-4df8-81d6-5f1f407631ac/registry-server/0.log" Dec 05 02:28:33 crc kubenswrapper[4665]: I1205 02:28:33.881459 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ldv79_7f4973ad-12ad-421c-b68f-9b47206f2e2e/extract-utilities/0.log" Dec 05 02:28:33 crc kubenswrapper[4665]: I1205 02:28:33.913708 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ldv79_7f4973ad-12ad-421c-b68f-9b47206f2e2e/extract-content/0.log" Dec 05 02:28:33 crc kubenswrapper[4665]: I1205 02:28:33.919698 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ldv79_7f4973ad-12ad-421c-b68f-9b47206f2e2e/extract-content/0.log" Dec 05 02:28:34 crc kubenswrapper[4665]: I1205 02:28:34.150573 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ldv79_7f4973ad-12ad-421c-b68f-9b47206f2e2e/extract-utilities/0.log" Dec 05 02:28:34 crc kubenswrapper[4665]: I1205 02:28:34.196702 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ldv79_7f4973ad-12ad-421c-b68f-9b47206f2e2e/extract-content/0.log" Dec 05 02:28:34 crc kubenswrapper[4665]: I1205 02:28:34.312655 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tlrk9_3e6ec612-5d6b-4431-ba68-690a5c6c9c2a/extract-utilities/0.log" Dec 05 02:28:34 crc kubenswrapper[4665]: I1205 02:28:34.410181 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ldv79_7f4973ad-12ad-421c-b68f-9b47206f2e2e/registry-server/0.log" Dec 05 02:28:34 crc kubenswrapper[4665]: I1205 02:28:34.450509 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tlrk9_3e6ec612-5d6b-4431-ba68-690a5c6c9c2a/extract-content/0.log" Dec 05 02:28:34 crc kubenswrapper[4665]: I1205 02:28:34.510038 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tlrk9_3e6ec612-5d6b-4431-ba68-690a5c6c9c2a/extract-utilities/0.log" Dec 05 02:28:34 crc kubenswrapper[4665]: I1205 02:28:34.544753 4665 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-tlrk9_3e6ec612-5d6b-4431-ba68-690a5c6c9c2a/extract-content/0.log" Dec 05 02:28:34 crc kubenswrapper[4665]: I1205 02:28:34.702205 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tlrk9_3e6ec612-5d6b-4431-ba68-690a5c6c9c2a/extract-content/0.log" Dec 05 02:28:34 crc kubenswrapper[4665]: I1205 02:28:34.712936 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tlrk9_3e6ec612-5d6b-4431-ba68-690a5c6c9c2a/extract-utilities/0.log" Dec 05 02:28:35 crc kubenswrapper[4665]: I1205 02:28:35.191878 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tlrk9_3e6ec612-5d6b-4431-ba68-690a5c6c9c2a/registry-server/0.log" Dec 05 02:28:40 crc kubenswrapper[4665]: I1205 02:28:40.371354 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-m5d9p"] Dec 05 02:28:40 crc kubenswrapper[4665]: E1205 02:28:40.372444 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7637e161-9969-46a7-b0b0-97fd7d14a225" containerName="container-00" Dec 05 02:28:40 crc kubenswrapper[4665]: I1205 02:28:40.372460 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="7637e161-9969-46a7-b0b0-97fd7d14a225" containerName="container-00" Dec 05 02:28:40 crc kubenswrapper[4665]: I1205 02:28:40.372727 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="7637e161-9969-46a7-b0b0-97fd7d14a225" containerName="container-00" Dec 05 02:28:40 crc kubenswrapper[4665]: I1205 02:28:40.374494 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-m5d9p" Dec 05 02:28:40 crc kubenswrapper[4665]: I1205 02:28:40.381248 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m5d9p"] Dec 05 02:28:40 crc kubenswrapper[4665]: I1205 02:28:40.478529 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5hw2w\" (UniqueName: \"kubernetes.io/projected/a1231b6e-e966-4486-a272-fee8c3b7b83f-kube-api-access-5hw2w\") pod \"redhat-operators-m5d9p\" (UID: \"a1231b6e-e966-4486-a272-fee8c3b7b83f\") " pod="openshift-marketplace/redhat-operators-m5d9p" Dec 05 02:28:40 crc kubenswrapper[4665]: I1205 02:28:40.478630 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1231b6e-e966-4486-a272-fee8c3b7b83f-catalog-content\") pod \"redhat-operators-m5d9p\" (UID: \"a1231b6e-e966-4486-a272-fee8c3b7b83f\") " pod="openshift-marketplace/redhat-operators-m5d9p" Dec 05 02:28:40 crc kubenswrapper[4665]: I1205 02:28:40.478716 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1231b6e-e966-4486-a272-fee8c3b7b83f-utilities\") pod \"redhat-operators-m5d9p\" (UID: \"a1231b6e-e966-4486-a272-fee8c3b7b83f\") " pod="openshift-marketplace/redhat-operators-m5d9p" Dec 05 02:28:40 crc kubenswrapper[4665]: I1205 02:28:40.579957 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1231b6e-e966-4486-a272-fee8c3b7b83f-utilities\") pod \"redhat-operators-m5d9p\" (UID: \"a1231b6e-e966-4486-a272-fee8c3b7b83f\") " pod="openshift-marketplace/redhat-operators-m5d9p" Dec 
05 02:28:40 crc kubenswrapper[4665]: I1205 02:28:40.580048 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5hw2w\" (UniqueName: \"kubernetes.io/projected/a1231b6e-e966-4486-a272-fee8c3b7b83f-kube-api-access-5hw2w\") pod \"redhat-operators-m5d9p\" (UID: \"a1231b6e-e966-4486-a272-fee8c3b7b83f\") " pod="openshift-marketplace/redhat-operators-m5d9p" Dec 05 02:28:40 crc kubenswrapper[4665]: I1205 02:28:40.580129 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1231b6e-e966-4486-a272-fee8c3b7b83f-catalog-content\") pod \"redhat-operators-m5d9p\" (UID: \"a1231b6e-e966-4486-a272-fee8c3b7b83f\") " pod="openshift-marketplace/redhat-operators-m5d9p" Dec 05 02:28:40 crc kubenswrapper[4665]: I1205 02:28:40.580633 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1231b6e-e966-4486-a272-fee8c3b7b83f-catalog-content\") pod \"redhat-operators-m5d9p\" (UID: \"a1231b6e-e966-4486-a272-fee8c3b7b83f\") " pod="openshift-marketplace/redhat-operators-m5d9p" Dec 05 02:28:40 crc kubenswrapper[4665]: I1205 02:28:40.580648 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1231b6e-e966-4486-a272-fee8c3b7b83f-utilities\") pod \"redhat-operators-m5d9p\" (UID: \"a1231b6e-e966-4486-a272-fee8c3b7b83f\") " pod="openshift-marketplace/redhat-operators-m5d9p" Dec 05 02:28:40 crc kubenswrapper[4665]: I1205 02:28:40.604434 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5hw2w\" (UniqueName: \"kubernetes.io/projected/a1231b6e-e966-4486-a272-fee8c3b7b83f-kube-api-access-5hw2w\") pod \"redhat-operators-m5d9p\" (UID: \"a1231b6e-e966-4486-a272-fee8c3b7b83f\") " pod="openshift-marketplace/redhat-operators-m5d9p" Dec 05 02:28:40 crc kubenswrapper[4665]: I1205 02:28:40.713349 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m5d9p" Dec 05 02:28:41 crc kubenswrapper[4665]: I1205 02:28:41.295415 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-m5d9p"] Dec 05 02:28:42 crc kubenswrapper[4665]: I1205 02:28:42.159963 4665 generic.go:334] "Generic (PLEG): container finished" podID="a1231b6e-e966-4486-a272-fee8c3b7b83f" containerID="94ffef277b0e172675b2a3050368dc9031ea3533cf3ce8c27ed5a45c28b80b59" exitCode=0 Dec 05 02:28:42 crc kubenswrapper[4665]: I1205 02:28:42.160500 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m5d9p" event={"ID":"a1231b6e-e966-4486-a272-fee8c3b7b83f","Type":"ContainerDied","Data":"94ffef277b0e172675b2a3050368dc9031ea3533cf3ce8c27ed5a45c28b80b59"} Dec 05 02:28:42 crc kubenswrapper[4665]: I1205 02:28:42.160536 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m5d9p" event={"ID":"a1231b6e-e966-4486-a272-fee8c3b7b83f","Type":"ContainerStarted","Data":"93f4f7850cbac418755b2d99318ff63afa9e6de14e87871f0a2ccd3887a93af5"} Dec 05 02:28:42 crc kubenswrapper[4665]: I1205 02:28:42.163009 4665 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 02:28:43 crc kubenswrapper[4665]: I1205 02:28:43.169332 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m5d9p" event={"ID":"a1231b6e-e966-4486-a272-fee8c3b7b83f","Type":"ContainerStarted","Data":"42836db89e5ab4389b60057512e455ab0e2efeccac5a3c353189a5f961e1e818"} Dec 05 02:28:43 crc kubenswrapper[4665]: I1205 02:28:43.893677 4665 scope.go:117] "RemoveContainer" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" Dec 05 02:28:43 crc kubenswrapper[4665]: E1205 02:28:43.893984 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:28:47 crc kubenswrapper[4665]: I1205 02:28:47.203507 4665 generic.go:334] "Generic (PLEG): container finished" podID="a1231b6e-e966-4486-a272-fee8c3b7b83f" containerID="42836db89e5ab4389b60057512e455ab0e2efeccac5a3c353189a5f961e1e818" exitCode=0 Dec 05 02:28:47 crc kubenswrapper[4665]: I1205 02:28:47.203700 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m5d9p" event={"ID":"a1231b6e-e966-4486-a272-fee8c3b7b83f","Type":"ContainerDied","Data":"42836db89e5ab4389b60057512e455ab0e2efeccac5a3c353189a5f961e1e818"} Dec 05 02:28:48 crc kubenswrapper[4665]: I1205 02:28:48.216495 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m5d9p" event={"ID":"a1231b6e-e966-4486-a272-fee8c3b7b83f","Type":"ContainerStarted","Data":"c2c5c6cfe3ccd8a1760429a8617090e07a92c04593f0f8313adf9d5c7ffdd72e"} Dec 05 02:28:48 crc kubenswrapper[4665]: I1205 02:28:48.233101 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-m5d9p" podStartSLOduration=2.769941497 podStartE2EDuration="8.23306258s" podCreationTimestamp="2025-12-05 02:28:40 +0000 UTC" firstStartedPulling="2025-12-05 02:28:42.162475518 
+0000 UTC m=+4697.501867817" lastFinishedPulling="2025-12-05 02:28:47.625596601 +0000 UTC m=+4702.964988900" observedRunningTime="2025-12-05 02:28:48.23180677 +0000 UTC m=+4703.571199079" watchObservedRunningTime="2025-12-05 02:28:48.23306258 +0000 UTC m=+4703.572454909" Dec 05 02:28:50 crc kubenswrapper[4665]: I1205 02:28:50.714401 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-m5d9p" Dec 05 02:28:50 crc kubenswrapper[4665]: I1205 02:28:50.714452 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-m5d9p" Dec 05 02:28:51 crc kubenswrapper[4665]: I1205 02:28:51.775804 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-m5d9p" podUID="a1231b6e-e966-4486-a272-fee8c3b7b83f" containerName="registry-server" probeResult="failure" output=< Dec 05 02:28:51 crc kubenswrapper[4665]: timeout: failed to connect service ":50051" within 1s Dec 05 02:28:51 crc kubenswrapper[4665]: > Dec 05 02:28:57 crc kubenswrapper[4665]: I1205 02:28:57.894560 4665 scope.go:117] "RemoveContainer" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" Dec 05 02:28:57 crc kubenswrapper[4665]: E1205 02:28:57.897015 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:29:00 crc kubenswrapper[4665]: I1205 02:29:00.802731 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-m5d9p" Dec 05 02:29:00 crc kubenswrapper[4665]: I1205 02:29:00.860278 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-m5d9p" Dec 05 02:29:01 crc kubenswrapper[4665]: I1205 02:29:01.068465 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m5d9p"] Dec 05 02:29:02 crc kubenswrapper[4665]: I1205 02:29:02.359870 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-m5d9p" podUID="a1231b6e-e966-4486-a272-fee8c3b7b83f" containerName="registry-server" containerID="cri-o://c2c5c6cfe3ccd8a1760429a8617090e07a92c04593f0f8313adf9d5c7ffdd72e" gracePeriod=2 Dec 05 02:29:02 crc kubenswrapper[4665]: I1205 02:29:02.924634 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m5d9p" Dec 05 02:29:03 crc kubenswrapper[4665]: I1205 02:29:03.056532 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1231b6e-e966-4486-a272-fee8c3b7b83f-utilities\") pod \"a1231b6e-e966-4486-a272-fee8c3b7b83f\" (UID: \"a1231b6e-e966-4486-a272-fee8c3b7b83f\") " Dec 05 02:29:03 crc kubenswrapper[4665]: I1205 02:29:03.056809 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5hw2w\" (UniqueName: \"kubernetes.io/projected/a1231b6e-e966-4486-a272-fee8c3b7b83f-kube-api-access-5hw2w\") pod \"a1231b6e-e966-4486-a272-fee8c3b7b83f\" (UID: \"a1231b6e-e966-4486-a272-fee8c3b7b83f\") " Dec 05 02:29:03 crc kubenswrapper[4665]: I1205 02:29:03.056851 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1231b6e-e966-4486-a272-fee8c3b7b83f-catalog-content\") pod \"a1231b6e-e966-4486-a272-fee8c3b7b83f\" (UID: \"a1231b6e-e966-4486-a272-fee8c3b7b83f\") " Dec 05 02:29:03 crc kubenswrapper[4665]: I1205 02:29:03.057322 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1231b6e-e966-4486-a272-fee8c3b7b83f-utilities" (OuterVolumeSpecName: "utilities") pod "a1231b6e-e966-4486-a272-fee8c3b7b83f" (UID: "a1231b6e-e966-4486-a272-fee8c3b7b83f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:29:03 crc kubenswrapper[4665]: I1205 02:29:03.059137 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a1231b6e-e966-4486-a272-fee8c3b7b83f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 02:29:03 crc kubenswrapper[4665]: I1205 02:29:03.082559 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a1231b6e-e966-4486-a272-fee8c3b7b83f-kube-api-access-5hw2w" (OuterVolumeSpecName: "kube-api-access-5hw2w") pod "a1231b6e-e966-4486-a272-fee8c3b7b83f" (UID: "a1231b6e-e966-4486-a272-fee8c3b7b83f"). InnerVolumeSpecName "kube-api-access-5hw2w". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:29:03 crc kubenswrapper[4665]: I1205 02:29:03.158868 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a1231b6e-e966-4486-a272-fee8c3b7b83f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a1231b6e-e966-4486-a272-fee8c3b7b83f" (UID: "a1231b6e-e966-4486-a272-fee8c3b7b83f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:29:03 crc kubenswrapper[4665]: I1205 02:29:03.161337 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5hw2w\" (UniqueName: \"kubernetes.io/projected/a1231b6e-e966-4486-a272-fee8c3b7b83f-kube-api-access-5hw2w\") on node \"crc\" DevicePath \"\"" Dec 05 02:29:03 crc kubenswrapper[4665]: I1205 02:29:03.161393 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a1231b6e-e966-4486-a272-fee8c3b7b83f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 02:29:03 crc kubenswrapper[4665]: I1205 02:29:03.471483 4665 generic.go:334] "Generic (PLEG): container finished" podID="a1231b6e-e966-4486-a272-fee8c3b7b83f" containerID="c2c5c6cfe3ccd8a1760429a8617090e07a92c04593f0f8313adf9d5c7ffdd72e" exitCode=0 Dec 05 02:29:03 crc kubenswrapper[4665]: I1205 02:29:03.471541 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m5d9p" event={"ID":"a1231b6e-e966-4486-a272-fee8c3b7b83f","Type":"ContainerDied","Data":"c2c5c6cfe3ccd8a1760429a8617090e07a92c04593f0f8313adf9d5c7ffdd72e"} Dec 05 02:29:03 crc kubenswrapper[4665]: I1205 02:29:03.471567 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-m5d9p" event={"ID":"a1231b6e-e966-4486-a272-fee8c3b7b83f","Type":"ContainerDied","Data":"93f4f7850cbac418755b2d99318ff63afa9e6de14e87871f0a2ccd3887a93af5"} Dec 05 02:29:03 crc kubenswrapper[4665]: I1205 02:29:03.471616 4665 scope.go:117] "RemoveContainer" containerID="c2c5c6cfe3ccd8a1760429a8617090e07a92c04593f0f8313adf9d5c7ffdd72e" Dec 05 02:29:03 crc kubenswrapper[4665]: I1205 02:29:03.471683 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-m5d9p" Dec 05 02:29:03 crc kubenswrapper[4665]: I1205 02:29:03.541326 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-m5d9p"] Dec 05 02:29:03 crc kubenswrapper[4665]: I1205 02:29:03.548976 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-m5d9p"] Dec 05 02:29:03 crc kubenswrapper[4665]: I1205 02:29:03.556098 4665 scope.go:117] "RemoveContainer" containerID="42836db89e5ab4389b60057512e455ab0e2efeccac5a3c353189a5f961e1e818" Dec 05 02:29:03 crc kubenswrapper[4665]: I1205 02:29:03.675496 4665 scope.go:117] "RemoveContainer" containerID="94ffef277b0e172675b2a3050368dc9031ea3533cf3ce8c27ed5a45c28b80b59" Dec 05 02:29:03 crc kubenswrapper[4665]: I1205 02:29:03.731457 4665 scope.go:117] "RemoveContainer" containerID="c2c5c6cfe3ccd8a1760429a8617090e07a92c04593f0f8313adf9d5c7ffdd72e" Dec 05 02:29:03 crc kubenswrapper[4665]: E1205 02:29:03.736530 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2c5c6cfe3ccd8a1760429a8617090e07a92c04593f0f8313adf9d5c7ffdd72e\": container with ID starting with c2c5c6cfe3ccd8a1760429a8617090e07a92c04593f0f8313adf9d5c7ffdd72e not found: ID does not exist" containerID="c2c5c6cfe3ccd8a1760429a8617090e07a92c04593f0f8313adf9d5c7ffdd72e" Dec 05 02:29:03 crc kubenswrapper[4665]: I1205 02:29:03.736583 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2c5c6cfe3ccd8a1760429a8617090e07a92c04593f0f8313adf9d5c7ffdd72e"} err="failed to get container status \"c2c5c6cfe3ccd8a1760429a8617090e07a92c04593f0f8313adf9d5c7ffdd72e\": rpc error: code = NotFound desc = could not find container \"c2c5c6cfe3ccd8a1760429a8617090e07a92c04593f0f8313adf9d5c7ffdd72e\": container with ID starting with c2c5c6cfe3ccd8a1760429a8617090e07a92c04593f0f8313adf9d5c7ffdd72e not found: ID does not exist" Dec 05 02:29:03 crc kubenswrapper[4665]: I1205 02:29:03.736608 4665 scope.go:117] "RemoveContainer" containerID="42836db89e5ab4389b60057512e455ab0e2efeccac5a3c353189a5f961e1e818" Dec 05 02:29:03 crc kubenswrapper[4665]: E1205 02:29:03.738231 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"42836db89e5ab4389b60057512e455ab0e2efeccac5a3c353189a5f961e1e818\": container with ID starting with 42836db89e5ab4389b60057512e455ab0e2efeccac5a3c353189a5f961e1e818 not found: ID does not exist" containerID="42836db89e5ab4389b60057512e455ab0e2efeccac5a3c353189a5f961e1e818" Dec 05 02:29:03 crc kubenswrapper[4665]: I1205 02:29:03.738304 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"42836db89e5ab4389b60057512e455ab0e2efeccac5a3c353189a5f961e1e818"} err="failed to get container status \"42836db89e5ab4389b60057512e455ab0e2efeccac5a3c353189a5f961e1e818\": rpc error: code = NotFound desc = could not find container \"42836db89e5ab4389b60057512e455ab0e2efeccac5a3c353189a5f961e1e818\": container with ID starting with 42836db89e5ab4389b60057512e455ab0e2efeccac5a3c353189a5f961e1e818 not found: ID does not exist" Dec 05 02:29:03 crc kubenswrapper[4665]: I1205 02:29:03.738334 4665 scope.go:117] "RemoveContainer" containerID="94ffef277b0e172675b2a3050368dc9031ea3533cf3ce8c27ed5a45c28b80b59" Dec 05 02:29:03 crc kubenswrapper[4665]: E1205 02:29:03.739122 4665 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"94ffef277b0e172675b2a3050368dc9031ea3533cf3ce8c27ed5a45c28b80b59\": container with ID starting with 94ffef277b0e172675b2a3050368dc9031ea3533cf3ce8c27ed5a45c28b80b59 not found: ID does not exist" containerID="94ffef277b0e172675b2a3050368dc9031ea3533cf3ce8c27ed5a45c28b80b59" Dec 05 02:29:03 crc kubenswrapper[4665]: I1205 02:29:03.739162 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94ffef277b0e172675b2a3050368dc9031ea3533cf3ce8c27ed5a45c28b80b59"} err="failed to get container status \"94ffef277b0e172675b2a3050368dc9031ea3533cf3ce8c27ed5a45c28b80b59\": rpc error: code = NotFound desc = could not find container \"94ffef277b0e172675b2a3050368dc9031ea3533cf3ce8c27ed5a45c28b80b59\": container with ID starting with 94ffef277b0e172675b2a3050368dc9031ea3533cf3ce8c27ed5a45c28b80b59 not found: ID does not exist" Dec 05 02:29:03 crc kubenswrapper[4665]: E1205 02:29:03.798006 4665 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1231b6e_e966_4486_a272_fee8c3b7b83f.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda1231b6e_e966_4486_a272_fee8c3b7b83f.slice/crio-93f4f7850cbac418755b2d99318ff63afa9e6de14e87871f0a2ccd3887a93af5\": RecentStats: unable to find data in memory cache]" Dec 05 02:29:04 crc kubenswrapper[4665]: I1205 02:29:04.903643 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a1231b6e-e966-4486-a272-fee8c3b7b83f" path="/var/lib/kubelet/pods/a1231b6e-e966-4486-a272-fee8c3b7b83f/volumes" Dec 05 02:29:11 crc kubenswrapper[4665]: I1205 02:29:11.893864 4665 scope.go:117] "RemoveContainer" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" Dec 05 02:29:11 crc kubenswrapper[4665]: E1205 02:29:11.894703 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:29:23 crc kubenswrapper[4665]: I1205 02:29:23.892946 4665 scope.go:117] "RemoveContainer" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" Dec 05 02:29:23 crc kubenswrapper[4665]: E1205 02:29:23.893842 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:29:36 crc kubenswrapper[4665]: I1205 02:29:36.894378 4665 scope.go:117] "RemoveContainer" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" Dec 05 02:29:36 crc kubenswrapper[4665]: E1205 02:29:36.895228 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:29:49 crc kubenswrapper[4665]: I1205 02:29:49.892994 4665 scope.go:117] "RemoveContainer" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" Dec 05 02:29:49 crc kubenswrapper[4665]: E1205 02:29:49.893788 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:30:00 crc kubenswrapper[4665]: I1205 02:30:00.152170 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415030-ksxfl"] Dec 05 02:30:00 crc kubenswrapper[4665]: E1205 02:30:00.153080 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1231b6e-e966-4486-a272-fee8c3b7b83f" containerName="extract-content" Dec 05 02:30:00 crc kubenswrapper[4665]: I1205 02:30:00.153092 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1231b6e-e966-4486-a272-fee8c3b7b83f" containerName="extract-content" Dec 05 02:30:00 crc kubenswrapper[4665]: E1205 02:30:00.153104 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1231b6e-e966-4486-a272-fee8c3b7b83f" containerName="registry-server" Dec 05 02:30:00 crc kubenswrapper[4665]: I1205 02:30:00.153111 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1231b6e-e966-4486-a272-fee8c3b7b83f" containerName="registry-server" Dec 05 02:30:00 crc kubenswrapper[4665]: E1205 02:30:00.153132 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a1231b6e-e966-4486-a272-fee8c3b7b83f" containerName="extract-utilities" Dec 05 02:30:00 crc kubenswrapper[4665]: I1205 02:30:00.153138 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="a1231b6e-e966-4486-a272-fee8c3b7b83f" containerName="extract-utilities" Dec 05 02:30:00 crc kubenswrapper[4665]: I1205 02:30:00.153340 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="a1231b6e-e966-4486-a272-fee8c3b7b83f" containerName="registry-server" Dec 05 02:30:00 crc kubenswrapper[4665]: I1205 02:30:00.154024 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415030-ksxfl" Dec 05 02:30:00 crc kubenswrapper[4665]: I1205 02:30:00.156445 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 02:30:00 crc kubenswrapper[4665]: I1205 02:30:00.164924 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 02:30:00 crc kubenswrapper[4665]: I1205 02:30:00.165991 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415030-ksxfl"] Dec 05 02:30:00 crc kubenswrapper[4665]: I1205 02:30:00.262598 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgpq6\" (UniqueName: \"kubernetes.io/projected/cb1c2561-579b-44f5-8584-2f4d2698d6dc-kube-api-access-xgpq6\") pod \"collect-profiles-29415030-ksxfl\" (UID: \"cb1c2561-579b-44f5-8584-2f4d2698d6dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415030-ksxfl" Dec 05 02:30:00 crc kubenswrapper[4665]: I1205 02:30:00.262834 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cb1c2561-579b-44f5-8584-2f4d2698d6dc-secret-volume\") pod \"collect-profiles-29415030-ksxfl\" (UID: \"cb1c2561-579b-44f5-8584-2f4d2698d6dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415030-ksxfl" Dec 05 02:30:00 crc kubenswrapper[4665]: I1205 02:30:00.263055 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cb1c2561-579b-44f5-8584-2f4d2698d6dc-config-volume\") pod \"collect-profiles-29415030-ksxfl\" (UID: \"cb1c2561-579b-44f5-8584-2f4d2698d6dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415030-ksxfl" Dec 05 02:30:00 crc kubenswrapper[4665]: I1205 02:30:00.364870 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cb1c2561-579b-44f5-8584-2f4d2698d6dc-secret-volume\") pod \"collect-profiles-29415030-ksxfl\" (UID: \"cb1c2561-579b-44f5-8584-2f4d2698d6dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415030-ksxfl" Dec 05 02:30:00 crc kubenswrapper[4665]: I1205 02:30:00.364955 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cb1c2561-579b-44f5-8584-2f4d2698d6dc-config-volume\") pod \"collect-profiles-29415030-ksxfl\" (UID: \"cb1c2561-579b-44f5-8584-2f4d2698d6dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415030-ksxfl" Dec 05 02:30:00 crc kubenswrapper[4665]: I1205 02:30:00.365042 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgpq6\" (UniqueName: \"kubernetes.io/projected/cb1c2561-579b-44f5-8584-2f4d2698d6dc-kube-api-access-xgpq6\") pod \"collect-profiles-29415030-ksxfl\" (UID: \"cb1c2561-579b-44f5-8584-2f4d2698d6dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415030-ksxfl" Dec 05 02:30:00 crc kubenswrapper[4665]: I1205 02:30:00.365900 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cb1c2561-579b-44f5-8584-2f4d2698d6dc-config-volume\") pod 
\"collect-profiles-29415030-ksxfl\" (UID: \"cb1c2561-579b-44f5-8584-2f4d2698d6dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415030-ksxfl" Dec 05 02:30:00 crc kubenswrapper[4665]: I1205 02:30:00.380019 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cb1c2561-579b-44f5-8584-2f4d2698d6dc-secret-volume\") pod \"collect-profiles-29415030-ksxfl\" (UID: \"cb1c2561-579b-44f5-8584-2f4d2698d6dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415030-ksxfl" Dec 05 02:30:00 crc kubenswrapper[4665]: I1205 02:30:00.382004 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgpq6\" (UniqueName: \"kubernetes.io/projected/cb1c2561-579b-44f5-8584-2f4d2698d6dc-kube-api-access-xgpq6\") pod \"collect-profiles-29415030-ksxfl\" (UID: \"cb1c2561-579b-44f5-8584-2f4d2698d6dc\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415030-ksxfl" Dec 05 02:30:00 crc kubenswrapper[4665]: I1205 02:30:00.482443 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415030-ksxfl" Dec 05 02:30:00 crc kubenswrapper[4665]: I1205 02:30:00.940100 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415030-ksxfl"] Dec 05 02:30:01 crc kubenswrapper[4665]: I1205 02:30:01.041474 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415030-ksxfl" event={"ID":"cb1c2561-579b-44f5-8584-2f4d2698d6dc","Type":"ContainerStarted","Data":"7d33f26ce99db0a7630462398b0576555d3f1ec94559c5505267e9f842331e31"} Dec 05 02:30:01 crc kubenswrapper[4665]: I1205 02:30:01.893012 4665 scope.go:117] "RemoveContainer" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" Dec 05 02:30:01 crc kubenswrapper[4665]: E1205 02:30:01.893512 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:30:02 crc kubenswrapper[4665]: I1205 02:30:02.053047 4665 generic.go:334] "Generic (PLEG): container finished" podID="cb1c2561-579b-44f5-8584-2f4d2698d6dc" containerID="f3ab2d3db69baaa7c8e26ef7740019844d23809931c566b73b0eb7194e3751df" exitCode=0 Dec 05 02:30:02 crc kubenswrapper[4665]: I1205 02:30:02.053271 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415030-ksxfl" event={"ID":"cb1c2561-579b-44f5-8584-2f4d2698d6dc","Type":"ContainerDied","Data":"f3ab2d3db69baaa7c8e26ef7740019844d23809931c566b73b0eb7194e3751df"} Dec 05 02:30:04 crc kubenswrapper[4665]: I1205 02:30:04.238620 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415030-ksxfl" Dec 05 02:30:04 crc kubenswrapper[4665]: I1205 02:30:04.337891 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cb1c2561-579b-44f5-8584-2f4d2698d6dc-config-volume\") pod \"cb1c2561-579b-44f5-8584-2f4d2698d6dc\" (UID: \"cb1c2561-579b-44f5-8584-2f4d2698d6dc\") " Dec 05 02:30:04 crc kubenswrapper[4665]: I1205 02:30:04.338229 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xgpq6\" (UniqueName: \"kubernetes.io/projected/cb1c2561-579b-44f5-8584-2f4d2698d6dc-kube-api-access-xgpq6\") pod \"cb1c2561-579b-44f5-8584-2f4d2698d6dc\" (UID: \"cb1c2561-579b-44f5-8584-2f4d2698d6dc\") " Dec 05 02:30:04 crc kubenswrapper[4665]: I1205 02:30:04.338279 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cb1c2561-579b-44f5-8584-2f4d2698d6dc-secret-volume\") pod \"cb1c2561-579b-44f5-8584-2f4d2698d6dc\" (UID: \"cb1c2561-579b-44f5-8584-2f4d2698d6dc\") " Dec 05 02:30:04 crc kubenswrapper[4665]: I1205 02:30:04.339810 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb1c2561-579b-44f5-8584-2f4d2698d6dc-config-volume" (OuterVolumeSpecName: "config-volume") pod "cb1c2561-579b-44f5-8584-2f4d2698d6dc" (UID: "cb1c2561-579b-44f5-8584-2f4d2698d6dc"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 02:30:04 crc kubenswrapper[4665]: I1205 02:30:04.347515 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb1c2561-579b-44f5-8584-2f4d2698d6dc-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "cb1c2561-579b-44f5-8584-2f4d2698d6dc" (UID: "cb1c2561-579b-44f5-8584-2f4d2698d6dc"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 02:30:04 crc kubenswrapper[4665]: I1205 02:30:04.350108 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb1c2561-579b-44f5-8584-2f4d2698d6dc-kube-api-access-xgpq6" (OuterVolumeSpecName: "kube-api-access-xgpq6") pod "cb1c2561-579b-44f5-8584-2f4d2698d6dc" (UID: "cb1c2561-579b-44f5-8584-2f4d2698d6dc"). InnerVolumeSpecName "kube-api-access-xgpq6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:30:04 crc kubenswrapper[4665]: I1205 02:30:04.440019 4665 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cb1c2561-579b-44f5-8584-2f4d2698d6dc-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 02:30:04 crc kubenswrapper[4665]: I1205 02:30:04.440056 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xgpq6\" (UniqueName: \"kubernetes.io/projected/cb1c2561-579b-44f5-8584-2f4d2698d6dc-kube-api-access-xgpq6\") on node \"crc\" DevicePath \"\"" Dec 05 02:30:04 crc kubenswrapper[4665]: I1205 02:30:04.440066 4665 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cb1c2561-579b-44f5-8584-2f4d2698d6dc-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 02:30:05 crc kubenswrapper[4665]: I1205 02:30:05.090765 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415030-ksxfl" event={"ID":"cb1c2561-579b-44f5-8584-2f4d2698d6dc","Type":"ContainerDied","Data":"7d33f26ce99db0a7630462398b0576555d3f1ec94559c5505267e9f842331e31"} Dec 05 02:30:05 crc kubenswrapper[4665]: I1205 02:30:05.090816 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d33f26ce99db0a7630462398b0576555d3f1ec94559c5505267e9f842331e31" Dec 05 02:30:05 crc kubenswrapper[4665]: I1205 02:30:05.090876 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415030-ksxfl" Dec 05 02:30:05 crc kubenswrapper[4665]: I1205 02:30:05.317959 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414985-t5stm"] Dec 05 02:30:05 crc kubenswrapper[4665]: I1205 02:30:05.326113 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29414985-t5stm"] Dec 05 02:30:06 crc kubenswrapper[4665]: I1205 02:30:06.910186 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ef28b0f-5dfc-45d0-8c97-d7b77e103a06" path="/var/lib/kubelet/pods/8ef28b0f-5dfc-45d0-8c97-d7b77e103a06/volumes" Dec 05 02:30:15 crc kubenswrapper[4665]: I1205 02:30:15.893424 4665 scope.go:117] "RemoveContainer" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" Dec 05 02:30:15 crc kubenswrapper[4665]: E1205 02:30:15.894545 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:30:29 crc kubenswrapper[4665]: I1205 02:30:29.894146 4665 scope.go:117] "RemoveContainer" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" Dec 05 02:30:29 crc kubenswrapper[4665]: E1205 02:30:29.894873 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:30:39 crc kubenswrapper[4665]: I1205 02:30:39.744712 4665 scope.go:117] "RemoveContainer" containerID="c2bae8613bdef9ba607b5fb048aec9de981ed974d20479fe9ab53be84b6ac8ce" Dec 05 02:30:43 crc kubenswrapper[4665]: I1205 02:30:43.894550 4665 scope.go:117] "RemoveContainer" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" Dec 05 02:30:43 crc kubenswrapper[4665]: E1205 02:30:43.903582 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:30:55 crc kubenswrapper[4665]: I1205 02:30:55.893238 4665 scope.go:117] "RemoveContainer" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" Dec 05 02:30:55 crc kubenswrapper[4665]: E1205 02:30:55.894227 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:30:57 crc kubenswrapper[4665]: I1205 02:30:57.565714 4665 generic.go:334] "Generic (PLEG): container finished" podID="25b736e7-0900-478d-a341-d41d78220399" containerID="a26da79a229840ec4ad97e7f56e4502795b9ef57d516059e689a9bd3ccadccd8" exitCode=0 Dec 05 02:30:57 crc kubenswrapper[4665]: I1205 02:30:57.566042 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-s297g/must-gather-bd7lg" event={"ID":"25b736e7-0900-478d-a341-d41d78220399","Type":"ContainerDied","Data":"a26da79a229840ec4ad97e7f56e4502795b9ef57d516059e689a9bd3ccadccd8"} Dec 05 02:30:57 crc kubenswrapper[4665]: I1205 02:30:57.566893 4665 scope.go:117] "RemoveContainer" containerID="a26da79a229840ec4ad97e7f56e4502795b9ef57d516059e689a9bd3ccadccd8" Dec 05 02:30:58 crc kubenswrapper[4665]: I1205 02:30:58.055285 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-s297g_must-gather-bd7lg_25b736e7-0900-478d-a341-d41d78220399/gather/0.log" Dec 05 02:31:06 crc kubenswrapper[4665]: I1205 02:31:06.403060 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-s297g/must-gather-bd7lg"] Dec 05 02:31:06 crc kubenswrapper[4665]: I1205 02:31:06.403710 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-s297g/must-gather-bd7lg" podUID="25b736e7-0900-478d-a341-d41d78220399" containerName="copy" containerID="cri-o://7e7c5f4be7efb7ae1bd04ee74f3456ddce473de1354941cf2ce584d4faa45873" gracePeriod=2 Dec 05 02:31:06 crc kubenswrapper[4665]: I1205 02:31:06.460792 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-s297g/must-gather-bd7lg"] Dec 05 02:31:06 crc kubenswrapper[4665]: I1205 02:31:06.718442 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-s297g_must-gather-bd7lg_25b736e7-0900-478d-a341-d41d78220399/copy/0.log" Dec 05 02:31:06 
crc kubenswrapper[4665]: I1205 02:31:06.733503 4665 generic.go:334] "Generic (PLEG): container finished" podID="25b736e7-0900-478d-a341-d41d78220399" containerID="7e7c5f4be7efb7ae1bd04ee74f3456ddce473de1354941cf2ce584d4faa45873" exitCode=143 Dec 05 02:31:07 crc kubenswrapper[4665]: I1205 02:31:07.058532 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-s297g_must-gather-bd7lg_25b736e7-0900-478d-a341-d41d78220399/copy/0.log" Dec 05 02:31:07 crc kubenswrapper[4665]: I1205 02:31:07.059344 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-s297g/must-gather-bd7lg" Dec 05 02:31:07 crc kubenswrapper[4665]: I1205 02:31:07.108518 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5l7dl\" (UniqueName: \"kubernetes.io/projected/25b736e7-0900-478d-a341-d41d78220399-kube-api-access-5l7dl\") pod \"25b736e7-0900-478d-a341-d41d78220399\" (UID: \"25b736e7-0900-478d-a341-d41d78220399\") " Dec 05 02:31:07 crc kubenswrapper[4665]: I1205 02:31:07.108581 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/25b736e7-0900-478d-a341-d41d78220399-must-gather-output\") pod \"25b736e7-0900-478d-a341-d41d78220399\" (UID: \"25b736e7-0900-478d-a341-d41d78220399\") " Dec 05 02:31:07 crc kubenswrapper[4665]: I1205 02:31:07.117361 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25b736e7-0900-478d-a341-d41d78220399-kube-api-access-5l7dl" (OuterVolumeSpecName: "kube-api-access-5l7dl") pod "25b736e7-0900-478d-a341-d41d78220399" (UID: "25b736e7-0900-478d-a341-d41d78220399"). InnerVolumeSpecName "kube-api-access-5l7dl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:31:07 crc kubenswrapper[4665]: I1205 02:31:07.211208 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5l7dl\" (UniqueName: \"kubernetes.io/projected/25b736e7-0900-478d-a341-d41d78220399-kube-api-access-5l7dl\") on node \"crc\" DevicePath \"\"" Dec 05 02:31:07 crc kubenswrapper[4665]: I1205 02:31:07.298207 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25b736e7-0900-478d-a341-d41d78220399-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "25b736e7-0900-478d-a341-d41d78220399" (UID: "25b736e7-0900-478d-a341-d41d78220399"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:31:07 crc kubenswrapper[4665]: I1205 02:31:07.312936 4665 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/25b736e7-0900-478d-a341-d41d78220399-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 05 02:31:07 crc kubenswrapper[4665]: I1205 02:31:07.746267 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-s297g_must-gather-bd7lg_25b736e7-0900-478d-a341-d41d78220399/copy/0.log" Dec 05 02:31:07 crc kubenswrapper[4665]: I1205 02:31:07.746860 4665 scope.go:117] "RemoveContainer" containerID="7e7c5f4be7efb7ae1bd04ee74f3456ddce473de1354941cf2ce584d4faa45873" Dec 05 02:31:07 crc kubenswrapper[4665]: I1205 02:31:07.746895 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-s297g/must-gather-bd7lg" Dec 05 02:31:07 crc kubenswrapper[4665]: I1205 02:31:07.773170 4665 scope.go:117] "RemoveContainer" containerID="a26da79a229840ec4ad97e7f56e4502795b9ef57d516059e689a9bd3ccadccd8" Dec 05 02:31:08 crc kubenswrapper[4665]: I1205 02:31:08.903799 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25b736e7-0900-478d-a341-d41d78220399" path="/var/lib/kubelet/pods/25b736e7-0900-478d-a341-d41d78220399/volumes" Dec 05 02:31:10 crc kubenswrapper[4665]: I1205 02:31:10.893878 4665 scope.go:117] "RemoveContainer" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" Dec 05 02:31:10 crc kubenswrapper[4665]: E1205 02:31:10.894508 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:31:23 crc kubenswrapper[4665]: I1205 02:31:23.893527 4665 scope.go:117] "RemoveContainer" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" Dec 05 02:31:24 crc kubenswrapper[4665]: I1205 02:31:24.908581 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerStarted","Data":"dcc5733d6b54f78f22af95e1b175a0eefa4864e3dc5b4fc934fb623d121ab0f0"} Dec 05 02:31:39 crc kubenswrapper[4665]: I1205 02:31:39.830194 4665 scope.go:117] "RemoveContainer" containerID="3573fd1ce9542373371068acdaee1c50e5123a8b7b8a64939499497df7eaf461" Dec 05 02:32:39 crc kubenswrapper[4665]: I1205 02:32:39.919790 4665 scope.go:117] "RemoveContainer" containerID="68cf7b926e72f9b1a55fd09d6fe2c0d644a08a376c18398a200371696139b1bb" Dec 05 02:33:35 crc kubenswrapper[4665]: I1205 02:33:35.592788 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-klv9p"] Dec 05 02:33:35 crc kubenswrapper[4665]: E1205 02:33:35.593847 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25b736e7-0900-478d-a341-d41d78220399" containerName="copy" Dec 05 02:33:35 crc kubenswrapper[4665]: I1205 02:33:35.593863 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="25b736e7-0900-478d-a341-d41d78220399" containerName="copy" Dec 05 02:33:35 crc kubenswrapper[4665]: E1205 02:33:35.593877 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25b736e7-0900-478d-a341-d41d78220399" containerName="gather" Dec 05 02:33:35 crc kubenswrapper[4665]: I1205 02:33:35.593884 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="25b736e7-0900-478d-a341-d41d78220399" containerName="gather" Dec 05 02:33:35 crc kubenswrapper[4665]: E1205 02:33:35.593920 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cb1c2561-579b-44f5-8584-2f4d2698d6dc" containerName="collect-profiles" Dec 05 02:33:35 crc kubenswrapper[4665]: I1205 02:33:35.593929 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="cb1c2561-579b-44f5-8584-2f4d2698d6dc" containerName="collect-profiles" Dec 05 02:33:35 crc kubenswrapper[4665]: I1205 02:33:35.594145 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="25b736e7-0900-478d-a341-d41d78220399" 
containerName="gather" Dec 05 02:33:35 crc kubenswrapper[4665]: I1205 02:33:35.594170 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="25b736e7-0900-478d-a341-d41d78220399" containerName="copy" Dec 05 02:33:35 crc kubenswrapper[4665]: I1205 02:33:35.594186 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="cb1c2561-579b-44f5-8584-2f4d2698d6dc" containerName="collect-profiles" Dec 05 02:33:35 crc kubenswrapper[4665]: I1205 02:33:35.595849 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-klv9p" Dec 05 02:33:35 crc kubenswrapper[4665]: I1205 02:33:35.607187 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-klv9p"] Dec 05 02:33:35 crc kubenswrapper[4665]: I1205 02:33:35.659715 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgz6x\" (UniqueName: \"kubernetes.io/projected/62bcc4d1-7703-4ff4-a08c-6d50557d1610-kube-api-access-vgz6x\") pod \"redhat-marketplace-klv9p\" (UID: \"62bcc4d1-7703-4ff4-a08c-6d50557d1610\") " pod="openshift-marketplace/redhat-marketplace-klv9p" Dec 05 02:33:35 crc kubenswrapper[4665]: I1205 02:33:35.660082 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62bcc4d1-7703-4ff4-a08c-6d50557d1610-utilities\") pod \"redhat-marketplace-klv9p\" (UID: \"62bcc4d1-7703-4ff4-a08c-6d50557d1610\") " pod="openshift-marketplace/redhat-marketplace-klv9p" Dec 05 02:33:35 crc kubenswrapper[4665]: I1205 02:33:35.660125 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62bcc4d1-7703-4ff4-a08c-6d50557d1610-catalog-content\") pod \"redhat-marketplace-klv9p\" (UID: \"62bcc4d1-7703-4ff4-a08c-6d50557d1610\") " pod="openshift-marketplace/redhat-marketplace-klv9p" Dec 05 02:33:35 crc kubenswrapper[4665]: I1205 02:33:35.761216 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62bcc4d1-7703-4ff4-a08c-6d50557d1610-utilities\") pod \"redhat-marketplace-klv9p\" (UID: \"62bcc4d1-7703-4ff4-a08c-6d50557d1610\") " pod="openshift-marketplace/redhat-marketplace-klv9p" Dec 05 02:33:35 crc kubenswrapper[4665]: I1205 02:33:35.761283 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62bcc4d1-7703-4ff4-a08c-6d50557d1610-catalog-content\") pod \"redhat-marketplace-klv9p\" (UID: \"62bcc4d1-7703-4ff4-a08c-6d50557d1610\") " pod="openshift-marketplace/redhat-marketplace-klv9p" Dec 05 02:33:35 crc kubenswrapper[4665]: I1205 02:33:35.761389 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgz6x\" (UniqueName: \"kubernetes.io/projected/62bcc4d1-7703-4ff4-a08c-6d50557d1610-kube-api-access-vgz6x\") pod \"redhat-marketplace-klv9p\" (UID: \"62bcc4d1-7703-4ff4-a08c-6d50557d1610\") " pod="openshift-marketplace/redhat-marketplace-klv9p" Dec 05 02:33:35 crc kubenswrapper[4665]: I1205 02:33:35.762638 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62bcc4d1-7703-4ff4-a08c-6d50557d1610-utilities\") pod \"redhat-marketplace-klv9p\" (UID: \"62bcc4d1-7703-4ff4-a08c-6d50557d1610\") " 
pod="openshift-marketplace/redhat-marketplace-klv9p" Dec 05 02:33:35 crc kubenswrapper[4665]: I1205 02:33:35.762920 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62bcc4d1-7703-4ff4-a08c-6d50557d1610-catalog-content\") pod \"redhat-marketplace-klv9p\" (UID: \"62bcc4d1-7703-4ff4-a08c-6d50557d1610\") " pod="openshift-marketplace/redhat-marketplace-klv9p" Dec 05 02:33:35 crc kubenswrapper[4665]: I1205 02:33:35.909197 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgz6x\" (UniqueName: \"kubernetes.io/projected/62bcc4d1-7703-4ff4-a08c-6d50557d1610-kube-api-access-vgz6x\") pod \"redhat-marketplace-klv9p\" (UID: \"62bcc4d1-7703-4ff4-a08c-6d50557d1610\") " pod="openshift-marketplace/redhat-marketplace-klv9p" Dec 05 02:33:35 crc kubenswrapper[4665]: I1205 02:33:35.917210 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-klv9p" Dec 05 02:33:36 crc kubenswrapper[4665]: I1205 02:33:36.670953 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-klv9p"] Dec 05 02:33:37 crc kubenswrapper[4665]: I1205 02:33:37.181530 4665 generic.go:334] "Generic (PLEG): container finished" podID="62bcc4d1-7703-4ff4-a08c-6d50557d1610" containerID="06a9364f0543eac77c76f79e7f6731399c28665465d1161e676d5daa65744ea3" exitCode=0 Dec 05 02:33:37 crc kubenswrapper[4665]: I1205 02:33:37.181684 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-klv9p" event={"ID":"62bcc4d1-7703-4ff4-a08c-6d50557d1610","Type":"ContainerDied","Data":"06a9364f0543eac77c76f79e7f6731399c28665465d1161e676d5daa65744ea3"} Dec 05 02:33:37 crc kubenswrapper[4665]: I1205 02:33:37.181864 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-klv9p" event={"ID":"62bcc4d1-7703-4ff4-a08c-6d50557d1610","Type":"ContainerStarted","Data":"2ec72f135ead9cce01a2d624f22c3423cadd22553c4c242272676a06e58d5d44"} Dec 05 02:33:38 crc kubenswrapper[4665]: I1205 02:33:38.191219 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-klv9p" event={"ID":"62bcc4d1-7703-4ff4-a08c-6d50557d1610","Type":"ContainerStarted","Data":"495a7f3dba5d38eebcc24ce49126c7201962a08cd88a4042cdf3491f7b0defa2"} Dec 05 02:33:39 crc kubenswrapper[4665]: I1205 02:33:39.201747 4665 generic.go:334] "Generic (PLEG): container finished" podID="62bcc4d1-7703-4ff4-a08c-6d50557d1610" containerID="495a7f3dba5d38eebcc24ce49126c7201962a08cd88a4042cdf3491f7b0defa2" exitCode=0 Dec 05 02:33:39 crc kubenswrapper[4665]: I1205 02:33:39.201937 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-klv9p" event={"ID":"62bcc4d1-7703-4ff4-a08c-6d50557d1610","Type":"ContainerDied","Data":"495a7f3dba5d38eebcc24ce49126c7201962a08cd88a4042cdf3491f7b0defa2"} Dec 05 02:33:40 crc kubenswrapper[4665]: I1205 02:33:40.211110 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-klv9p" event={"ID":"62bcc4d1-7703-4ff4-a08c-6d50557d1610","Type":"ContainerStarted","Data":"d4fa2e6bba7fcdb93e73c909ed84dd2ed5e85a588126bb9524cdab39f650a6ab"} Dec 05 02:33:40 crc kubenswrapper[4665]: I1205 02:33:40.234307 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-klv9p" podStartSLOduration=2.803202578 
podStartE2EDuration="5.234276453s" podCreationTimestamp="2025-12-05 02:33:35 +0000 UTC" firstStartedPulling="2025-12-05 02:33:37.186025054 +0000 UTC m=+4992.525417353" lastFinishedPulling="2025-12-05 02:33:39.617098929 +0000 UTC m=+4994.956491228" observedRunningTime="2025-12-05 02:33:40.227182982 +0000 UTC m=+4995.566575311" watchObservedRunningTime="2025-12-05 02:33:40.234276453 +0000 UTC m=+4995.573668752" Dec 05 02:33:44 crc kubenswrapper[4665]: I1205 02:33:44.921937 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 02:33:44 crc kubenswrapper[4665]: I1205 02:33:44.922611 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 02:33:45 crc kubenswrapper[4665]: I1205 02:33:45.918137 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-klv9p" Dec 05 02:33:45 crc kubenswrapper[4665]: I1205 02:33:45.918723 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-klv9p" Dec 05 02:33:45 crc kubenswrapper[4665]: I1205 02:33:45.982154 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-klv9p" Dec 05 02:33:46 crc kubenswrapper[4665]: I1205 02:33:46.333096 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-klv9p" Dec 05 02:33:46 crc kubenswrapper[4665]: I1205 02:33:46.381374 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-klv9p"] Dec 05 02:33:48 crc kubenswrapper[4665]: I1205 02:33:48.304131 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-klv9p" podUID="62bcc4d1-7703-4ff4-a08c-6d50557d1610" containerName="registry-server" containerID="cri-o://d4fa2e6bba7fcdb93e73c909ed84dd2ed5e85a588126bb9524cdab39f650a6ab" gracePeriod=2 Dec 05 02:33:48 crc kubenswrapper[4665]: I1205 02:33:48.739743 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-klv9p" Dec 05 02:33:48 crc kubenswrapper[4665]: I1205 02:33:48.837316 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62bcc4d1-7703-4ff4-a08c-6d50557d1610-catalog-content\") pod \"62bcc4d1-7703-4ff4-a08c-6d50557d1610\" (UID: \"62bcc4d1-7703-4ff4-a08c-6d50557d1610\") " Dec 05 02:33:48 crc kubenswrapper[4665]: I1205 02:33:48.837482 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62bcc4d1-7703-4ff4-a08c-6d50557d1610-utilities\") pod \"62bcc4d1-7703-4ff4-a08c-6d50557d1610\" (UID: \"62bcc4d1-7703-4ff4-a08c-6d50557d1610\") " Dec 05 02:33:48 crc kubenswrapper[4665]: I1205 02:33:48.837532 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vgz6x\" (UniqueName: \"kubernetes.io/projected/62bcc4d1-7703-4ff4-a08c-6d50557d1610-kube-api-access-vgz6x\") pod \"62bcc4d1-7703-4ff4-a08c-6d50557d1610\" (UID: \"62bcc4d1-7703-4ff4-a08c-6d50557d1610\") " Dec 05 02:33:48 crc kubenswrapper[4665]: I1205 02:33:48.838457 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62bcc4d1-7703-4ff4-a08c-6d50557d1610-utilities" (OuterVolumeSpecName: "utilities") pod "62bcc4d1-7703-4ff4-a08c-6d50557d1610" (UID: "62bcc4d1-7703-4ff4-a08c-6d50557d1610"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:33:48 crc kubenswrapper[4665]: I1205 02:33:48.844946 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62bcc4d1-7703-4ff4-a08c-6d50557d1610-kube-api-access-vgz6x" (OuterVolumeSpecName: "kube-api-access-vgz6x") pod "62bcc4d1-7703-4ff4-a08c-6d50557d1610" (UID: "62bcc4d1-7703-4ff4-a08c-6d50557d1610"). InnerVolumeSpecName "kube-api-access-vgz6x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:33:48 crc kubenswrapper[4665]: I1205 02:33:48.860102 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62bcc4d1-7703-4ff4-a08c-6d50557d1610-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "62bcc4d1-7703-4ff4-a08c-6d50557d1610" (UID: "62bcc4d1-7703-4ff4-a08c-6d50557d1610"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:33:48 crc kubenswrapper[4665]: I1205 02:33:48.940507 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62bcc4d1-7703-4ff4-a08c-6d50557d1610-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 02:33:48 crc kubenswrapper[4665]: I1205 02:33:48.940535 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62bcc4d1-7703-4ff4-a08c-6d50557d1610-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 02:33:48 crc kubenswrapper[4665]: I1205 02:33:48.940545 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vgz6x\" (UniqueName: \"kubernetes.io/projected/62bcc4d1-7703-4ff4-a08c-6d50557d1610-kube-api-access-vgz6x\") on node \"crc\" DevicePath \"\"" Dec 05 02:33:49 crc kubenswrapper[4665]: I1205 02:33:49.316812 4665 generic.go:334] "Generic (PLEG): container finished" podID="62bcc4d1-7703-4ff4-a08c-6d50557d1610" containerID="d4fa2e6bba7fcdb93e73c909ed84dd2ed5e85a588126bb9524cdab39f650a6ab" exitCode=0 Dec 05 02:33:49 crc kubenswrapper[4665]: I1205 02:33:49.316855 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-klv9p" event={"ID":"62bcc4d1-7703-4ff4-a08c-6d50557d1610","Type":"ContainerDied","Data":"d4fa2e6bba7fcdb93e73c909ed84dd2ed5e85a588126bb9524cdab39f650a6ab"} Dec 05 02:33:49 crc kubenswrapper[4665]: I1205 02:33:49.316889 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-klv9p" event={"ID":"62bcc4d1-7703-4ff4-a08c-6d50557d1610","Type":"ContainerDied","Data":"2ec72f135ead9cce01a2d624f22c3423cadd22553c4c242272676a06e58d5d44"} Dec 05 02:33:49 crc kubenswrapper[4665]: I1205 02:33:49.316918 4665 scope.go:117] "RemoveContainer" containerID="d4fa2e6bba7fcdb93e73c909ed84dd2ed5e85a588126bb9524cdab39f650a6ab" Dec 05 02:33:49 crc kubenswrapper[4665]: I1205 02:33:49.317054 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-klv9p" Dec 05 02:33:49 crc kubenswrapper[4665]: I1205 02:33:49.344659 4665 scope.go:117] "RemoveContainer" containerID="495a7f3dba5d38eebcc24ce49126c7201962a08cd88a4042cdf3491f7b0defa2" Dec 05 02:33:49 crc kubenswrapper[4665]: I1205 02:33:49.344660 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-klv9p"] Dec 05 02:33:49 crc kubenswrapper[4665]: I1205 02:33:49.356466 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-klv9p"] Dec 05 02:33:49 crc kubenswrapper[4665]: I1205 02:33:49.379241 4665 scope.go:117] "RemoveContainer" containerID="06a9364f0543eac77c76f79e7f6731399c28665465d1161e676d5daa65744ea3" Dec 05 02:33:49 crc kubenswrapper[4665]: I1205 02:33:49.439003 4665 scope.go:117] "RemoveContainer" containerID="d4fa2e6bba7fcdb93e73c909ed84dd2ed5e85a588126bb9524cdab39f650a6ab" Dec 05 02:33:49 crc kubenswrapper[4665]: E1205 02:33:49.439425 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d4fa2e6bba7fcdb93e73c909ed84dd2ed5e85a588126bb9524cdab39f650a6ab\": container with ID starting with d4fa2e6bba7fcdb93e73c909ed84dd2ed5e85a588126bb9524cdab39f650a6ab not found: ID does not exist" containerID="d4fa2e6bba7fcdb93e73c909ed84dd2ed5e85a588126bb9524cdab39f650a6ab" Dec 05 02:33:49 crc kubenswrapper[4665]: I1205 02:33:49.439450 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d4fa2e6bba7fcdb93e73c909ed84dd2ed5e85a588126bb9524cdab39f650a6ab"} err="failed to get container status \"d4fa2e6bba7fcdb93e73c909ed84dd2ed5e85a588126bb9524cdab39f650a6ab\": rpc error: code = NotFound desc = could not find container \"d4fa2e6bba7fcdb93e73c909ed84dd2ed5e85a588126bb9524cdab39f650a6ab\": container with ID starting with d4fa2e6bba7fcdb93e73c909ed84dd2ed5e85a588126bb9524cdab39f650a6ab not found: ID does not exist" Dec 05 02:33:49 crc kubenswrapper[4665]: I1205 02:33:49.439469 4665 scope.go:117] "RemoveContainer" containerID="495a7f3dba5d38eebcc24ce49126c7201962a08cd88a4042cdf3491f7b0defa2" Dec 05 02:33:49 crc kubenswrapper[4665]: E1205 02:33:49.439818 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"495a7f3dba5d38eebcc24ce49126c7201962a08cd88a4042cdf3491f7b0defa2\": container with ID starting with 495a7f3dba5d38eebcc24ce49126c7201962a08cd88a4042cdf3491f7b0defa2 not found: ID does not exist" containerID="495a7f3dba5d38eebcc24ce49126c7201962a08cd88a4042cdf3491f7b0defa2" Dec 05 02:33:49 crc kubenswrapper[4665]: I1205 02:33:49.439893 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"495a7f3dba5d38eebcc24ce49126c7201962a08cd88a4042cdf3491f7b0defa2"} err="failed to get container status \"495a7f3dba5d38eebcc24ce49126c7201962a08cd88a4042cdf3491f7b0defa2\": rpc error: code = NotFound desc = could not find container \"495a7f3dba5d38eebcc24ce49126c7201962a08cd88a4042cdf3491f7b0defa2\": container with ID starting with 495a7f3dba5d38eebcc24ce49126c7201962a08cd88a4042cdf3491f7b0defa2 not found: ID does not exist" Dec 05 02:33:49 crc kubenswrapper[4665]: I1205 02:33:49.439950 4665 scope.go:117] "RemoveContainer" containerID="06a9364f0543eac77c76f79e7f6731399c28665465d1161e676d5daa65744ea3" Dec 05 02:33:49 crc kubenswrapper[4665]: E1205 02:33:49.440401 4665 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"06a9364f0543eac77c76f79e7f6731399c28665465d1161e676d5daa65744ea3\": container with ID starting with 06a9364f0543eac77c76f79e7f6731399c28665465d1161e676d5daa65744ea3 not found: ID does not exist" containerID="06a9364f0543eac77c76f79e7f6731399c28665465d1161e676d5daa65744ea3" Dec 05 02:33:49 crc kubenswrapper[4665]: I1205 02:33:49.440473 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"06a9364f0543eac77c76f79e7f6731399c28665465d1161e676d5daa65744ea3"} err="failed to get container status \"06a9364f0543eac77c76f79e7f6731399c28665465d1161e676d5daa65744ea3\": rpc error: code = NotFound desc = could not find container \"06a9364f0543eac77c76f79e7f6731399c28665465d1161e676d5daa65744ea3\": container with ID starting with 06a9364f0543eac77c76f79e7f6731399c28665465d1161e676d5daa65744ea3 not found: ID does not exist" Dec 05 02:33:50 crc kubenswrapper[4665]: I1205 02:33:50.903009 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62bcc4d1-7703-4ff4-a08c-6d50557d1610" path="/var/lib/kubelet/pods/62bcc4d1-7703-4ff4-a08c-6d50557d1610/volumes" Dec 05 02:34:09 crc kubenswrapper[4665]: I1205 02:34:09.432791 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-gjkrl/must-gather-l4xct"] Dec 05 02:34:09 crc kubenswrapper[4665]: E1205 02:34:09.440581 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62bcc4d1-7703-4ff4-a08c-6d50557d1610" containerName="extract-utilities" Dec 05 02:34:09 crc kubenswrapper[4665]: I1205 02:34:09.440614 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="62bcc4d1-7703-4ff4-a08c-6d50557d1610" containerName="extract-utilities" Dec 05 02:34:09 crc kubenswrapper[4665]: E1205 02:34:09.440636 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62bcc4d1-7703-4ff4-a08c-6d50557d1610" containerName="extract-content" Dec 05 02:34:09 crc kubenswrapper[4665]: I1205 02:34:09.440642 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="62bcc4d1-7703-4ff4-a08c-6d50557d1610" containerName="extract-content" Dec 05 02:34:09 crc kubenswrapper[4665]: E1205 02:34:09.440662 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62bcc4d1-7703-4ff4-a08c-6d50557d1610" containerName="registry-server" Dec 05 02:34:09 crc kubenswrapper[4665]: I1205 02:34:09.440668 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="62bcc4d1-7703-4ff4-a08c-6d50557d1610" containerName="registry-server" Dec 05 02:34:09 crc kubenswrapper[4665]: I1205 02:34:09.440850 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="62bcc4d1-7703-4ff4-a08c-6d50557d1610" containerName="registry-server" Dec 05 02:34:09 crc kubenswrapper[4665]: I1205 02:34:09.441825 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gjkrl/must-gather-l4xct" Dec 05 02:34:09 crc kubenswrapper[4665]: I1205 02:34:09.444815 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-gjkrl"/"kube-root-ca.crt" Dec 05 02:34:09 crc kubenswrapper[4665]: I1205 02:34:09.445011 4665 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-gjkrl"/"openshift-service-ca.crt" Dec 05 02:34:09 crc kubenswrapper[4665]: I1205 02:34:09.469634 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-gjkrl/must-gather-l4xct"] Dec 05 02:34:09 crc kubenswrapper[4665]: I1205 02:34:09.541941 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wc7ms\" (UniqueName: \"kubernetes.io/projected/067213a1-4227-401f-b02f-be7bb4adafd4-kube-api-access-wc7ms\") pod \"must-gather-l4xct\" (UID: \"067213a1-4227-401f-b02f-be7bb4adafd4\") " pod="openshift-must-gather-gjkrl/must-gather-l4xct" Dec 05 02:34:09 crc kubenswrapper[4665]: I1205 02:34:09.542217 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/067213a1-4227-401f-b02f-be7bb4adafd4-must-gather-output\") pod \"must-gather-l4xct\" (UID: \"067213a1-4227-401f-b02f-be7bb4adafd4\") " pod="openshift-must-gather-gjkrl/must-gather-l4xct" Dec 05 02:34:09 crc kubenswrapper[4665]: I1205 02:34:09.644343 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/067213a1-4227-401f-b02f-be7bb4adafd4-must-gather-output\") pod \"must-gather-l4xct\" (UID: \"067213a1-4227-401f-b02f-be7bb4adafd4\") " pod="openshift-must-gather-gjkrl/must-gather-l4xct" Dec 05 02:34:09 crc kubenswrapper[4665]: I1205 02:34:09.644785 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wc7ms\" (UniqueName: \"kubernetes.io/projected/067213a1-4227-401f-b02f-be7bb4adafd4-kube-api-access-wc7ms\") pod \"must-gather-l4xct\" (UID: \"067213a1-4227-401f-b02f-be7bb4adafd4\") " pod="openshift-must-gather-gjkrl/must-gather-l4xct" Dec 05 02:34:09 crc kubenswrapper[4665]: I1205 02:34:09.645546 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/067213a1-4227-401f-b02f-be7bb4adafd4-must-gather-output\") pod \"must-gather-l4xct\" (UID: \"067213a1-4227-401f-b02f-be7bb4adafd4\") " pod="openshift-must-gather-gjkrl/must-gather-l4xct" Dec 05 02:34:09 crc kubenswrapper[4665]: I1205 02:34:09.811882 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wc7ms\" (UniqueName: \"kubernetes.io/projected/067213a1-4227-401f-b02f-be7bb4adafd4-kube-api-access-wc7ms\") pod \"must-gather-l4xct\" (UID: \"067213a1-4227-401f-b02f-be7bb4adafd4\") " pod="openshift-must-gather-gjkrl/must-gather-l4xct" Dec 05 02:34:10 crc kubenswrapper[4665]: I1205 02:34:10.068593 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gjkrl/must-gather-l4xct" Dec 05 02:34:10 crc kubenswrapper[4665]: I1205 02:34:10.635988 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-gjkrl/must-gather-l4xct"] Dec 05 02:34:11 crc kubenswrapper[4665]: I1205 02:34:11.499499 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gjkrl/must-gather-l4xct" event={"ID":"067213a1-4227-401f-b02f-be7bb4adafd4","Type":"ContainerStarted","Data":"d3f5c0f9a81c21f76cc2f6b8bd23c78c4044669df8fb1f587d76190d1a474bb7"} Dec 05 02:34:11 crc kubenswrapper[4665]: I1205 02:34:11.499881 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gjkrl/must-gather-l4xct" event={"ID":"067213a1-4227-401f-b02f-be7bb4adafd4","Type":"ContainerStarted","Data":"5f501d728b5de7d806a1e3722f6356a6b70e035cd98cc36b59fcbf7d3f93a544"} Dec 05 02:34:11 crc kubenswrapper[4665]: I1205 02:34:11.499897 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gjkrl/must-gather-l4xct" event={"ID":"067213a1-4227-401f-b02f-be7bb4adafd4","Type":"ContainerStarted","Data":"40025bf92885762fc6ce099d1c1b39caa087b44c971ad15addc63c7d4034c506"} Dec 05 02:34:11 crc kubenswrapper[4665]: I1205 02:34:11.526381 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-gjkrl/must-gather-l4xct" podStartSLOduration=2.526355905 podStartE2EDuration="2.526355905s" podCreationTimestamp="2025-12-05 02:34:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 02:34:11.51743832 +0000 UTC m=+5026.856830619" watchObservedRunningTime="2025-12-05 02:34:11.526355905 +0000 UTC m=+5026.865748244" Dec 05 02:34:12 crc kubenswrapper[4665]: I1205 02:34:12.581072 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-blrpv"] Dec 05 02:34:12 crc kubenswrapper[4665]: I1205 02:34:12.583392 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-blrpv" Dec 05 02:34:12 crc kubenswrapper[4665]: I1205 02:34:12.598719 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-blrpv"] Dec 05 02:34:12 crc kubenswrapper[4665]: I1205 02:34:12.722394 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bac1c25d-7a27-42b2-8e44-fa22cc252f3c-utilities\") pod \"community-operators-blrpv\" (UID: \"bac1c25d-7a27-42b2-8e44-fa22cc252f3c\") " pod="openshift-marketplace/community-operators-blrpv" Dec 05 02:34:12 crc kubenswrapper[4665]: I1205 02:34:12.722698 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bac1c25d-7a27-42b2-8e44-fa22cc252f3c-catalog-content\") pod \"community-operators-blrpv\" (UID: \"bac1c25d-7a27-42b2-8e44-fa22cc252f3c\") " pod="openshift-marketplace/community-operators-blrpv" Dec 05 02:34:12 crc kubenswrapper[4665]: I1205 02:34:12.722865 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbnxn\" (UniqueName: \"kubernetes.io/projected/bac1c25d-7a27-42b2-8e44-fa22cc252f3c-kube-api-access-pbnxn\") pod \"community-operators-blrpv\" (UID: \"bac1c25d-7a27-42b2-8e44-fa22cc252f3c\") " pod="openshift-marketplace/community-operators-blrpv" Dec 05 02:34:12 crc kubenswrapper[4665]: I1205 02:34:12.825024 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bac1c25d-7a27-42b2-8e44-fa22cc252f3c-utilities\") pod \"community-operators-blrpv\" (UID: \"bac1c25d-7a27-42b2-8e44-fa22cc252f3c\") " pod="openshift-marketplace/community-operators-blrpv" Dec 05 02:34:12 crc kubenswrapper[4665]: I1205 02:34:12.825149 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bac1c25d-7a27-42b2-8e44-fa22cc252f3c-catalog-content\") pod \"community-operators-blrpv\" (UID: \"bac1c25d-7a27-42b2-8e44-fa22cc252f3c\") " pod="openshift-marketplace/community-operators-blrpv" Dec 05 02:34:12 crc kubenswrapper[4665]: I1205 02:34:12.825200 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbnxn\" (UniqueName: \"kubernetes.io/projected/bac1c25d-7a27-42b2-8e44-fa22cc252f3c-kube-api-access-pbnxn\") pod \"community-operators-blrpv\" (UID: \"bac1c25d-7a27-42b2-8e44-fa22cc252f3c\") " pod="openshift-marketplace/community-operators-blrpv" Dec 05 02:34:12 crc kubenswrapper[4665]: I1205 02:34:12.825548 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bac1c25d-7a27-42b2-8e44-fa22cc252f3c-utilities\") pod \"community-operators-blrpv\" (UID: \"bac1c25d-7a27-42b2-8e44-fa22cc252f3c\") " pod="openshift-marketplace/community-operators-blrpv" Dec 05 02:34:12 crc kubenswrapper[4665]: I1205 02:34:12.825892 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bac1c25d-7a27-42b2-8e44-fa22cc252f3c-catalog-content\") pod \"community-operators-blrpv\" (UID: \"bac1c25d-7a27-42b2-8e44-fa22cc252f3c\") " pod="openshift-marketplace/community-operators-blrpv" Dec 05 02:34:12 crc kubenswrapper[4665]: I1205 02:34:12.845441 4665 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-pbnxn\" (UniqueName: \"kubernetes.io/projected/bac1c25d-7a27-42b2-8e44-fa22cc252f3c-kube-api-access-pbnxn\") pod \"community-operators-blrpv\" (UID: \"bac1c25d-7a27-42b2-8e44-fa22cc252f3c\") " pod="openshift-marketplace/community-operators-blrpv" Dec 05 02:34:12 crc kubenswrapper[4665]: I1205 02:34:12.904788 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-blrpv" Dec 05 02:34:13 crc kubenswrapper[4665]: I1205 02:34:13.525575 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-blrpv"] Dec 05 02:34:14 crc kubenswrapper[4665]: I1205 02:34:14.521505 4665 generic.go:334] "Generic (PLEG): container finished" podID="bac1c25d-7a27-42b2-8e44-fa22cc252f3c" containerID="0b932d1794d7f109951d2d9bbf66e571cc43199158d9199fc1608f79dcce41f7" exitCode=0 Dec 05 02:34:14 crc kubenswrapper[4665]: I1205 02:34:14.521687 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-blrpv" event={"ID":"bac1c25d-7a27-42b2-8e44-fa22cc252f3c","Type":"ContainerDied","Data":"0b932d1794d7f109951d2d9bbf66e571cc43199158d9199fc1608f79dcce41f7"} Dec 05 02:34:14 crc kubenswrapper[4665]: I1205 02:34:14.521788 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-blrpv" event={"ID":"bac1c25d-7a27-42b2-8e44-fa22cc252f3c","Type":"ContainerStarted","Data":"0886b22862257a805a44e111a978f25f7c20c6b3bd62622d9ba7ba5f92a6eaff"} Dec 05 02:34:14 crc kubenswrapper[4665]: I1205 02:34:14.523661 4665 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 02:34:14 crc kubenswrapper[4665]: I1205 02:34:14.922339 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 02:34:14 crc kubenswrapper[4665]: I1205 02:34:14.922670 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 02:34:15 crc kubenswrapper[4665]: I1205 02:34:15.335220 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-gjkrl/crc-debug-2f5dt"] Dec 05 02:34:15 crc kubenswrapper[4665]: I1205 02:34:15.336333 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gjkrl/crc-debug-2f5dt" Dec 05 02:34:15 crc kubenswrapper[4665]: I1205 02:34:15.340117 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-gjkrl"/"default-dockercfg-ksv9r" Dec 05 02:34:15 crc kubenswrapper[4665]: I1205 02:34:15.471611 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f53d9f7c-fcef-4fb8-9196-50014c5b6285-host\") pod \"crc-debug-2f5dt\" (UID: \"f53d9f7c-fcef-4fb8-9196-50014c5b6285\") " pod="openshift-must-gather-gjkrl/crc-debug-2f5dt" Dec 05 02:34:15 crc kubenswrapper[4665]: I1205 02:34:15.471772 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f7cqt\" (UniqueName: \"kubernetes.io/projected/f53d9f7c-fcef-4fb8-9196-50014c5b6285-kube-api-access-f7cqt\") pod \"crc-debug-2f5dt\" (UID: \"f53d9f7c-fcef-4fb8-9196-50014c5b6285\") " pod="openshift-must-gather-gjkrl/crc-debug-2f5dt" Dec 05 02:34:15 crc kubenswrapper[4665]: I1205 02:34:15.532766 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-blrpv" event={"ID":"bac1c25d-7a27-42b2-8e44-fa22cc252f3c","Type":"ContainerStarted","Data":"d665ede68f6f7587262e29d1133ab6481abe07f7bc1dbf5a416a7d354853f54e"} Dec 05 02:34:15 crc kubenswrapper[4665]: I1205 02:34:15.573402 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f7cqt\" (UniqueName: \"kubernetes.io/projected/f53d9f7c-fcef-4fb8-9196-50014c5b6285-kube-api-access-f7cqt\") pod \"crc-debug-2f5dt\" (UID: \"f53d9f7c-fcef-4fb8-9196-50014c5b6285\") " pod="openshift-must-gather-gjkrl/crc-debug-2f5dt" Dec 05 02:34:15 crc kubenswrapper[4665]: I1205 02:34:15.573507 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f53d9f7c-fcef-4fb8-9196-50014c5b6285-host\") pod \"crc-debug-2f5dt\" (UID: \"f53d9f7c-fcef-4fb8-9196-50014c5b6285\") " pod="openshift-must-gather-gjkrl/crc-debug-2f5dt" Dec 05 02:34:15 crc kubenswrapper[4665]: I1205 02:34:15.573614 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f53d9f7c-fcef-4fb8-9196-50014c5b6285-host\") pod \"crc-debug-2f5dt\" (UID: \"f53d9f7c-fcef-4fb8-9196-50014c5b6285\") " pod="openshift-must-gather-gjkrl/crc-debug-2f5dt" Dec 05 02:34:15 crc kubenswrapper[4665]: I1205 02:34:15.601366 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f7cqt\" (UniqueName: \"kubernetes.io/projected/f53d9f7c-fcef-4fb8-9196-50014c5b6285-kube-api-access-f7cqt\") pod \"crc-debug-2f5dt\" (UID: \"f53d9f7c-fcef-4fb8-9196-50014c5b6285\") " pod="openshift-must-gather-gjkrl/crc-debug-2f5dt" Dec 05 02:34:15 crc kubenswrapper[4665]: I1205 02:34:15.651350 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gjkrl/crc-debug-2f5dt" Dec 05 02:34:16 crc kubenswrapper[4665]: I1205 02:34:16.543824 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gjkrl/crc-debug-2f5dt" event={"ID":"f53d9f7c-fcef-4fb8-9196-50014c5b6285","Type":"ContainerStarted","Data":"a468fca5ca066ffc73a94a7295c021fdae79c083f84b9a76a507c1c3c9ddd5a9"} Dec 05 02:34:16 crc kubenswrapper[4665]: I1205 02:34:16.544494 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gjkrl/crc-debug-2f5dt" event={"ID":"f53d9f7c-fcef-4fb8-9196-50014c5b6285","Type":"ContainerStarted","Data":"16c2da002c699abe529a18d97a8b78cbff5ee2c718af445bd36167b41e104604"} Dec 05 02:34:16 crc kubenswrapper[4665]: I1205 02:34:16.546529 4665 generic.go:334] "Generic (PLEG): container finished" podID="bac1c25d-7a27-42b2-8e44-fa22cc252f3c" containerID="d665ede68f6f7587262e29d1133ab6481abe07f7bc1dbf5a416a7d354853f54e" exitCode=0 Dec 05 02:34:16 crc kubenswrapper[4665]: I1205 02:34:16.546573 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-blrpv" event={"ID":"bac1c25d-7a27-42b2-8e44-fa22cc252f3c","Type":"ContainerDied","Data":"d665ede68f6f7587262e29d1133ab6481abe07f7bc1dbf5a416a7d354853f54e"} Dec 05 02:34:16 crc kubenswrapper[4665]: I1205 02:34:16.565879 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-gjkrl/crc-debug-2f5dt" podStartSLOduration=1.565859622 podStartE2EDuration="1.565859622s" podCreationTimestamp="2025-12-05 02:34:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 02:34:16.557199873 +0000 UTC m=+5031.896592182" watchObservedRunningTime="2025-12-05 02:34:16.565859622 +0000 UTC m=+5031.905251921" Dec 05 02:34:17 crc kubenswrapper[4665]: I1205 02:34:17.574979 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-blrpv" event={"ID":"bac1c25d-7a27-42b2-8e44-fa22cc252f3c","Type":"ContainerStarted","Data":"c0941b8d8b658a3e37ab93d66c19cff294e5c02902f1f8d9a650693dcc0679a2"} Dec 05 02:34:17 crc kubenswrapper[4665]: I1205 02:34:17.604383 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-blrpv" podStartSLOduration=2.955322483 podStartE2EDuration="5.604364916s" podCreationTimestamp="2025-12-05 02:34:12 +0000 UTC" firstStartedPulling="2025-12-05 02:34:14.523444418 +0000 UTC m=+5029.862836717" lastFinishedPulling="2025-12-05 02:34:17.172486841 +0000 UTC m=+5032.511879150" observedRunningTime="2025-12-05 02:34:17.592884749 +0000 UTC m=+5032.932277048" watchObservedRunningTime="2025-12-05 02:34:17.604364916 +0000 UTC m=+5032.943757235" Dec 05 02:34:22 crc kubenswrapper[4665]: I1205 02:34:22.905388 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-blrpv" Dec 05 02:34:22 crc kubenswrapper[4665]: I1205 02:34:22.905879 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-blrpv" Dec 05 02:34:22 crc kubenswrapper[4665]: I1205 02:34:22.961561 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-blrpv" Dec 05 02:34:23 crc kubenswrapper[4665]: I1205 02:34:23.676045 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-marketplace/community-operators-blrpv" Dec 05 02:34:23 crc kubenswrapper[4665]: I1205 02:34:23.787145 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-blrpv"] Dec 05 02:34:25 crc kubenswrapper[4665]: I1205 02:34:25.644425 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-blrpv" podUID="bac1c25d-7a27-42b2-8e44-fa22cc252f3c" containerName="registry-server" containerID="cri-o://c0941b8d8b658a3e37ab93d66c19cff294e5c02902f1f8d9a650693dcc0679a2" gracePeriod=2 Dec 05 02:34:26 crc kubenswrapper[4665]: I1205 02:34:26.655944 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-blrpv" Dec 05 02:34:26 crc kubenswrapper[4665]: I1205 02:34:26.656856 4665 generic.go:334] "Generic (PLEG): container finished" podID="bac1c25d-7a27-42b2-8e44-fa22cc252f3c" containerID="c0941b8d8b658a3e37ab93d66c19cff294e5c02902f1f8d9a650693dcc0679a2" exitCode=0 Dec 05 02:34:26 crc kubenswrapper[4665]: I1205 02:34:26.656890 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-blrpv" event={"ID":"bac1c25d-7a27-42b2-8e44-fa22cc252f3c","Type":"ContainerDied","Data":"c0941b8d8b658a3e37ab93d66c19cff294e5c02902f1f8d9a650693dcc0679a2"} Dec 05 02:34:26 crc kubenswrapper[4665]: I1205 02:34:26.656914 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-blrpv" event={"ID":"bac1c25d-7a27-42b2-8e44-fa22cc252f3c","Type":"ContainerDied","Data":"0886b22862257a805a44e111a978f25f7c20c6b3bd62622d9ba7ba5f92a6eaff"} Dec 05 02:34:26 crc kubenswrapper[4665]: I1205 02:34:26.656929 4665 scope.go:117] "RemoveContainer" containerID="c0941b8d8b658a3e37ab93d66c19cff294e5c02902f1f8d9a650693dcc0679a2" Dec 05 02:34:26 crc kubenswrapper[4665]: I1205 02:34:26.693116 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bac1c25d-7a27-42b2-8e44-fa22cc252f3c-utilities\") pod \"bac1c25d-7a27-42b2-8e44-fa22cc252f3c\" (UID: \"bac1c25d-7a27-42b2-8e44-fa22cc252f3c\") " Dec 05 02:34:26 crc kubenswrapper[4665]: I1205 02:34:26.693168 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pbnxn\" (UniqueName: \"kubernetes.io/projected/bac1c25d-7a27-42b2-8e44-fa22cc252f3c-kube-api-access-pbnxn\") pod \"bac1c25d-7a27-42b2-8e44-fa22cc252f3c\" (UID: \"bac1c25d-7a27-42b2-8e44-fa22cc252f3c\") " Dec 05 02:34:26 crc kubenswrapper[4665]: I1205 02:34:26.693253 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bac1c25d-7a27-42b2-8e44-fa22cc252f3c-catalog-content\") pod \"bac1c25d-7a27-42b2-8e44-fa22cc252f3c\" (UID: \"bac1c25d-7a27-42b2-8e44-fa22cc252f3c\") " Dec 05 02:34:26 crc kubenswrapper[4665]: I1205 02:34:26.694039 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bac1c25d-7a27-42b2-8e44-fa22cc252f3c-utilities" (OuterVolumeSpecName: "utilities") pod "bac1c25d-7a27-42b2-8e44-fa22cc252f3c" (UID: "bac1c25d-7a27-42b2-8e44-fa22cc252f3c"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:34:26 crc kubenswrapper[4665]: I1205 02:34:26.701094 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bac1c25d-7a27-42b2-8e44-fa22cc252f3c-kube-api-access-pbnxn" (OuterVolumeSpecName: "kube-api-access-pbnxn") pod "bac1c25d-7a27-42b2-8e44-fa22cc252f3c" (UID: "bac1c25d-7a27-42b2-8e44-fa22cc252f3c"). InnerVolumeSpecName "kube-api-access-pbnxn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:34:26 crc kubenswrapper[4665]: I1205 02:34:26.729926 4665 scope.go:117] "RemoveContainer" containerID="d665ede68f6f7587262e29d1133ab6481abe07f7bc1dbf5a416a7d354853f54e" Dec 05 02:34:26 crc kubenswrapper[4665]: I1205 02:34:26.753687 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bac1c25d-7a27-42b2-8e44-fa22cc252f3c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bac1c25d-7a27-42b2-8e44-fa22cc252f3c" (UID: "bac1c25d-7a27-42b2-8e44-fa22cc252f3c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:34:26 crc kubenswrapper[4665]: I1205 02:34:26.760223 4665 scope.go:117] "RemoveContainer" containerID="0b932d1794d7f109951d2d9bbf66e571cc43199158d9199fc1608f79dcce41f7" Dec 05 02:34:26 crc kubenswrapper[4665]: I1205 02:34:26.795932 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bac1c25d-7a27-42b2-8e44-fa22cc252f3c-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 02:34:26 crc kubenswrapper[4665]: I1205 02:34:26.796520 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bac1c25d-7a27-42b2-8e44-fa22cc252f3c-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 02:34:26 crc kubenswrapper[4665]: I1205 02:34:26.796537 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pbnxn\" (UniqueName: \"kubernetes.io/projected/bac1c25d-7a27-42b2-8e44-fa22cc252f3c-kube-api-access-pbnxn\") on node \"crc\" DevicePath \"\"" Dec 05 02:34:26 crc kubenswrapper[4665]: I1205 02:34:26.801139 4665 scope.go:117] "RemoveContainer" containerID="c0941b8d8b658a3e37ab93d66c19cff294e5c02902f1f8d9a650693dcc0679a2" Dec 05 02:34:26 crc kubenswrapper[4665]: E1205 02:34:26.801623 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0941b8d8b658a3e37ab93d66c19cff294e5c02902f1f8d9a650693dcc0679a2\": container with ID starting with c0941b8d8b658a3e37ab93d66c19cff294e5c02902f1f8d9a650693dcc0679a2 not found: ID does not exist" containerID="c0941b8d8b658a3e37ab93d66c19cff294e5c02902f1f8d9a650693dcc0679a2" Dec 05 02:34:26 crc kubenswrapper[4665]: I1205 02:34:26.801658 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0941b8d8b658a3e37ab93d66c19cff294e5c02902f1f8d9a650693dcc0679a2"} err="failed to get container status \"c0941b8d8b658a3e37ab93d66c19cff294e5c02902f1f8d9a650693dcc0679a2\": rpc error: code = NotFound desc = could not find container \"c0941b8d8b658a3e37ab93d66c19cff294e5c02902f1f8d9a650693dcc0679a2\": container with ID starting with c0941b8d8b658a3e37ab93d66c19cff294e5c02902f1f8d9a650693dcc0679a2 not found: ID does not exist" Dec 05 02:34:26 crc kubenswrapper[4665]: I1205 02:34:26.801686 4665 scope.go:117] "RemoveContainer" containerID="d665ede68f6f7587262e29d1133ab6481abe07f7bc1dbf5a416a7d354853f54e" Dec 05 
02:34:26 crc kubenswrapper[4665]: E1205 02:34:26.802765 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d665ede68f6f7587262e29d1133ab6481abe07f7bc1dbf5a416a7d354853f54e\": container with ID starting with d665ede68f6f7587262e29d1133ab6481abe07f7bc1dbf5a416a7d354853f54e not found: ID does not exist" containerID="d665ede68f6f7587262e29d1133ab6481abe07f7bc1dbf5a416a7d354853f54e" Dec 05 02:34:26 crc kubenswrapper[4665]: I1205 02:34:26.802795 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d665ede68f6f7587262e29d1133ab6481abe07f7bc1dbf5a416a7d354853f54e"} err="failed to get container status \"d665ede68f6f7587262e29d1133ab6481abe07f7bc1dbf5a416a7d354853f54e\": rpc error: code = NotFound desc = could not find container \"d665ede68f6f7587262e29d1133ab6481abe07f7bc1dbf5a416a7d354853f54e\": container with ID starting with d665ede68f6f7587262e29d1133ab6481abe07f7bc1dbf5a416a7d354853f54e not found: ID does not exist" Dec 05 02:34:26 crc kubenswrapper[4665]: I1205 02:34:26.802814 4665 scope.go:117] "RemoveContainer" containerID="0b932d1794d7f109951d2d9bbf66e571cc43199158d9199fc1608f79dcce41f7" Dec 05 02:34:26 crc kubenswrapper[4665]: E1205 02:34:26.803104 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0b932d1794d7f109951d2d9bbf66e571cc43199158d9199fc1608f79dcce41f7\": container with ID starting with 0b932d1794d7f109951d2d9bbf66e571cc43199158d9199fc1608f79dcce41f7 not found: ID does not exist" containerID="0b932d1794d7f109951d2d9bbf66e571cc43199158d9199fc1608f79dcce41f7" Dec 05 02:34:26 crc kubenswrapper[4665]: I1205 02:34:26.803133 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0b932d1794d7f109951d2d9bbf66e571cc43199158d9199fc1608f79dcce41f7"} err="failed to get container status \"0b932d1794d7f109951d2d9bbf66e571cc43199158d9199fc1608f79dcce41f7\": rpc error: code = NotFound desc = could not find container \"0b932d1794d7f109951d2d9bbf66e571cc43199158d9199fc1608f79dcce41f7\": container with ID starting with 0b932d1794d7f109951d2d9bbf66e571cc43199158d9199fc1608f79dcce41f7 not found: ID does not exist" Dec 05 02:34:27 crc kubenswrapper[4665]: I1205 02:34:27.665102 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-blrpv" Dec 05 02:34:27 crc kubenswrapper[4665]: I1205 02:34:27.687866 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-blrpv"] Dec 05 02:34:27 crc kubenswrapper[4665]: I1205 02:34:27.695146 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-blrpv"] Dec 05 02:34:28 crc kubenswrapper[4665]: I1205 02:34:28.904065 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bac1c25d-7a27-42b2-8e44-fa22cc252f3c" path="/var/lib/kubelet/pods/bac1c25d-7a27-42b2-8e44-fa22cc252f3c/volumes" Dec 05 02:34:32 crc kubenswrapper[4665]: I1205 02:34:32.181642 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-t6st2"] Dec 05 02:34:32 crc kubenswrapper[4665]: E1205 02:34:32.183417 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bac1c25d-7a27-42b2-8e44-fa22cc252f3c" containerName="registry-server" Dec 05 02:34:32 crc kubenswrapper[4665]: I1205 02:34:32.183496 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="bac1c25d-7a27-42b2-8e44-fa22cc252f3c" containerName="registry-server" Dec 05 02:34:32 crc kubenswrapper[4665]: E1205 02:34:32.183555 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bac1c25d-7a27-42b2-8e44-fa22cc252f3c" containerName="extract-content" Dec 05 02:34:32 crc kubenswrapper[4665]: I1205 02:34:32.183602 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="bac1c25d-7a27-42b2-8e44-fa22cc252f3c" containerName="extract-content" Dec 05 02:34:32 crc kubenswrapper[4665]: E1205 02:34:32.183670 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bac1c25d-7a27-42b2-8e44-fa22cc252f3c" containerName="extract-utilities" Dec 05 02:34:32 crc kubenswrapper[4665]: I1205 02:34:32.183717 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="bac1c25d-7a27-42b2-8e44-fa22cc252f3c" containerName="extract-utilities" Dec 05 02:34:32 crc kubenswrapper[4665]: I1205 02:34:32.183939 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="bac1c25d-7a27-42b2-8e44-fa22cc252f3c" containerName="registry-server" Dec 05 02:34:32 crc kubenswrapper[4665]: I1205 02:34:32.185322 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-t6st2" Dec 05 02:34:32 crc kubenswrapper[4665]: I1205 02:34:32.193221 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t6st2"] Dec 05 02:34:32 crc kubenswrapper[4665]: I1205 02:34:32.294460 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h69nl\" (UniqueName: \"kubernetes.io/projected/1d4139ac-ffd2-4817-9ba8-23212cd71b14-kube-api-access-h69nl\") pod \"certified-operators-t6st2\" (UID: \"1d4139ac-ffd2-4817-9ba8-23212cd71b14\") " pod="openshift-marketplace/certified-operators-t6st2" Dec 05 02:34:32 crc kubenswrapper[4665]: I1205 02:34:32.294562 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d4139ac-ffd2-4817-9ba8-23212cd71b14-catalog-content\") pod \"certified-operators-t6st2\" (UID: \"1d4139ac-ffd2-4817-9ba8-23212cd71b14\") " pod="openshift-marketplace/certified-operators-t6st2" Dec 05 02:34:32 crc kubenswrapper[4665]: I1205 02:34:32.294632 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d4139ac-ffd2-4817-9ba8-23212cd71b14-utilities\") pod \"certified-operators-t6st2\" (UID: \"1d4139ac-ffd2-4817-9ba8-23212cd71b14\") " pod="openshift-marketplace/certified-operators-t6st2" Dec 05 02:34:32 crc kubenswrapper[4665]: I1205 02:34:32.395998 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d4139ac-ffd2-4817-9ba8-23212cd71b14-utilities\") pod \"certified-operators-t6st2\" (UID: \"1d4139ac-ffd2-4817-9ba8-23212cd71b14\") " pod="openshift-marketplace/certified-operators-t6st2" Dec 05 02:34:32 crc kubenswrapper[4665]: I1205 02:34:32.396380 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h69nl\" (UniqueName: \"kubernetes.io/projected/1d4139ac-ffd2-4817-9ba8-23212cd71b14-kube-api-access-h69nl\") pod \"certified-operators-t6st2\" (UID: \"1d4139ac-ffd2-4817-9ba8-23212cd71b14\") " pod="openshift-marketplace/certified-operators-t6st2" Dec 05 02:34:32 crc kubenswrapper[4665]: I1205 02:34:32.396546 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d4139ac-ffd2-4817-9ba8-23212cd71b14-catalog-content\") pod \"certified-operators-t6st2\" (UID: \"1d4139ac-ffd2-4817-9ba8-23212cd71b14\") " pod="openshift-marketplace/certified-operators-t6st2" Dec 05 02:34:32 crc kubenswrapper[4665]: I1205 02:34:32.396574 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d4139ac-ffd2-4817-9ba8-23212cd71b14-utilities\") pod \"certified-operators-t6st2\" (UID: \"1d4139ac-ffd2-4817-9ba8-23212cd71b14\") " pod="openshift-marketplace/certified-operators-t6st2" Dec 05 02:34:32 crc kubenswrapper[4665]: I1205 02:34:32.396804 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d4139ac-ffd2-4817-9ba8-23212cd71b14-catalog-content\") pod \"certified-operators-t6st2\" (UID: \"1d4139ac-ffd2-4817-9ba8-23212cd71b14\") " pod="openshift-marketplace/certified-operators-t6st2" Dec 05 02:34:32 crc kubenswrapper[4665]: I1205 02:34:32.418145 4665 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-h69nl\" (UniqueName: \"kubernetes.io/projected/1d4139ac-ffd2-4817-9ba8-23212cd71b14-kube-api-access-h69nl\") pod \"certified-operators-t6st2\" (UID: \"1d4139ac-ffd2-4817-9ba8-23212cd71b14\") " pod="openshift-marketplace/certified-operators-t6st2" Dec 05 02:34:32 crc kubenswrapper[4665]: I1205 02:34:32.507771 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t6st2" Dec 05 02:34:33 crc kubenswrapper[4665]: I1205 02:34:33.131994 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-t6st2"] Dec 05 02:34:33 crc kubenswrapper[4665]: I1205 02:34:33.731620 4665 generic.go:334] "Generic (PLEG): container finished" podID="1d4139ac-ffd2-4817-9ba8-23212cd71b14" containerID="32e81ccec7beb22dfeaae1124c9c4a77205f07bb9734c296dcab5e4a594fc932" exitCode=0 Dec 05 02:34:33 crc kubenswrapper[4665]: I1205 02:34:33.731658 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t6st2" event={"ID":"1d4139ac-ffd2-4817-9ba8-23212cd71b14","Type":"ContainerDied","Data":"32e81ccec7beb22dfeaae1124c9c4a77205f07bb9734c296dcab5e4a594fc932"} Dec 05 02:34:33 crc kubenswrapper[4665]: I1205 02:34:33.731681 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t6st2" event={"ID":"1d4139ac-ffd2-4817-9ba8-23212cd71b14","Type":"ContainerStarted","Data":"ac812dc69050eac96b255d56290bcdee3e22f71f573223ebf3ef172107c20883"} Dec 05 02:34:34 crc kubenswrapper[4665]: I1205 02:34:34.742717 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t6st2" event={"ID":"1d4139ac-ffd2-4817-9ba8-23212cd71b14","Type":"ContainerStarted","Data":"a4c1b8b91c5b5b3458ad3d0c8e4f3d0e0df9e062859843e26c514a552e07bf44"} Dec 05 02:34:36 crc kubenswrapper[4665]: I1205 02:34:36.761774 4665 generic.go:334] "Generic (PLEG): container finished" podID="1d4139ac-ffd2-4817-9ba8-23212cd71b14" containerID="a4c1b8b91c5b5b3458ad3d0c8e4f3d0e0df9e062859843e26c514a552e07bf44" exitCode=0 Dec 05 02:34:36 crc kubenswrapper[4665]: I1205 02:34:36.761854 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t6st2" event={"ID":"1d4139ac-ffd2-4817-9ba8-23212cd71b14","Type":"ContainerDied","Data":"a4c1b8b91c5b5b3458ad3d0c8e4f3d0e0df9e062859843e26c514a552e07bf44"} Dec 05 02:34:38 crc kubenswrapper[4665]: I1205 02:34:38.781387 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t6st2" event={"ID":"1d4139ac-ffd2-4817-9ba8-23212cd71b14","Type":"ContainerStarted","Data":"a33cb686377178c046df4cf34ef52c1cb17d0ed9a613ae9248a19fb467e29c59"} Dec 05 02:34:42 crc kubenswrapper[4665]: I1205 02:34:42.508623 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-t6st2" Dec 05 02:34:42 crc kubenswrapper[4665]: I1205 02:34:42.510655 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-t6st2" Dec 05 02:34:42 crc kubenswrapper[4665]: I1205 02:34:42.562086 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-t6st2" Dec 05 02:34:42 crc kubenswrapper[4665]: I1205 02:34:42.585524 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/certified-operators-t6st2" podStartSLOduration=7.112189781 podStartE2EDuration="10.585502713s" podCreationTimestamp="2025-12-05 02:34:32 +0000 UTC" firstStartedPulling="2025-12-05 02:34:33.734713455 +0000 UTC m=+5049.074105744" lastFinishedPulling="2025-12-05 02:34:37.208026377 +0000 UTC m=+5052.547418676" observedRunningTime="2025-12-05 02:34:38.816036932 +0000 UTC m=+5054.155429231" watchObservedRunningTime="2025-12-05 02:34:42.585502713 +0000 UTC m=+5057.924895012" Dec 05 02:34:42 crc kubenswrapper[4665]: I1205 02:34:42.872277 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-t6st2" Dec 05 02:34:43 crc kubenswrapper[4665]: I1205 02:34:43.779485 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t6st2"] Dec 05 02:34:44 crc kubenswrapper[4665]: I1205 02:34:44.830848 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-t6st2" podUID="1d4139ac-ffd2-4817-9ba8-23212cd71b14" containerName="registry-server" containerID="cri-o://a33cb686377178c046df4cf34ef52c1cb17d0ed9a613ae9248a19fb467e29c59" gracePeriod=2 Dec 05 02:34:44 crc kubenswrapper[4665]: I1205 02:34:44.922328 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 02:34:44 crc kubenswrapper[4665]: I1205 02:34:44.922492 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 02:34:44 crc kubenswrapper[4665]: I1205 02:34:44.922544 4665 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 02:34:44 crc kubenswrapper[4665]: I1205 02:34:44.923322 4665 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"dcc5733d6b54f78f22af95e1b175a0eefa4864e3dc5b4fc934fb623d121ab0f0"} pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 02:34:44 crc kubenswrapper[4665]: I1205 02:34:44.923385 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" containerID="cri-o://dcc5733d6b54f78f22af95e1b175a0eefa4864e3dc5b4fc934fb623d121ab0f0" gracePeriod=600 Dec 05 02:34:45 crc kubenswrapper[4665]: I1205 02:34:45.854619 4665 generic.go:334] "Generic (PLEG): container finished" podID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerID="dcc5733d6b54f78f22af95e1b175a0eefa4864e3dc5b4fc934fb623d121ab0f0" exitCode=0 Dec 05 02:34:45 crc kubenswrapper[4665]: I1205 02:34:45.854881 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" 
event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerDied","Data":"dcc5733d6b54f78f22af95e1b175a0eefa4864e3dc5b4fc934fb623d121ab0f0"} Dec 05 02:34:45 crc kubenswrapper[4665]: I1205 02:34:45.854914 4665 scope.go:117] "RemoveContainer" containerID="2271020dcc519b18ae35854217faadbff8fff74fc2a4159ea1d3db8f6e0b3e06" Dec 05 02:34:45 crc kubenswrapper[4665]: I1205 02:34:45.866451 4665 generic.go:334] "Generic (PLEG): container finished" podID="1d4139ac-ffd2-4817-9ba8-23212cd71b14" containerID="a33cb686377178c046df4cf34ef52c1cb17d0ed9a613ae9248a19fb467e29c59" exitCode=0 Dec 05 02:34:45 crc kubenswrapper[4665]: I1205 02:34:45.866492 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t6st2" event={"ID":"1d4139ac-ffd2-4817-9ba8-23212cd71b14","Type":"ContainerDied","Data":"a33cb686377178c046df4cf34ef52c1cb17d0ed9a613ae9248a19fb467e29c59"} Dec 05 02:34:46 crc kubenswrapper[4665]: I1205 02:34:46.023561 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t6st2" Dec 05 02:34:46 crc kubenswrapper[4665]: I1205 02:34:46.182938 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h69nl\" (UniqueName: \"kubernetes.io/projected/1d4139ac-ffd2-4817-9ba8-23212cd71b14-kube-api-access-h69nl\") pod \"1d4139ac-ffd2-4817-9ba8-23212cd71b14\" (UID: \"1d4139ac-ffd2-4817-9ba8-23212cd71b14\") " Dec 05 02:34:46 crc kubenswrapper[4665]: I1205 02:34:46.183096 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d4139ac-ffd2-4817-9ba8-23212cd71b14-utilities\") pod \"1d4139ac-ffd2-4817-9ba8-23212cd71b14\" (UID: \"1d4139ac-ffd2-4817-9ba8-23212cd71b14\") " Dec 05 02:34:46 crc kubenswrapper[4665]: I1205 02:34:46.183440 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d4139ac-ffd2-4817-9ba8-23212cd71b14-catalog-content\") pod \"1d4139ac-ffd2-4817-9ba8-23212cd71b14\" (UID: \"1d4139ac-ffd2-4817-9ba8-23212cd71b14\") " Dec 05 02:34:46 crc kubenswrapper[4665]: I1205 02:34:46.184236 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d4139ac-ffd2-4817-9ba8-23212cd71b14-utilities" (OuterVolumeSpecName: "utilities") pod "1d4139ac-ffd2-4817-9ba8-23212cd71b14" (UID: "1d4139ac-ffd2-4817-9ba8-23212cd71b14"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:34:46 crc kubenswrapper[4665]: I1205 02:34:46.194032 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d4139ac-ffd2-4817-9ba8-23212cd71b14-kube-api-access-h69nl" (OuterVolumeSpecName: "kube-api-access-h69nl") pod "1d4139ac-ffd2-4817-9ba8-23212cd71b14" (UID: "1d4139ac-ffd2-4817-9ba8-23212cd71b14"). InnerVolumeSpecName "kube-api-access-h69nl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:34:46 crc kubenswrapper[4665]: I1205 02:34:46.239393 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d4139ac-ffd2-4817-9ba8-23212cd71b14-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d4139ac-ffd2-4817-9ba8-23212cd71b14" (UID: "1d4139ac-ffd2-4817-9ba8-23212cd71b14"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:34:46 crc kubenswrapper[4665]: I1205 02:34:46.288622 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h69nl\" (UniqueName: \"kubernetes.io/projected/1d4139ac-ffd2-4817-9ba8-23212cd71b14-kube-api-access-h69nl\") on node \"crc\" DevicePath \"\"" Dec 05 02:34:46 crc kubenswrapper[4665]: I1205 02:34:46.288654 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d4139ac-ffd2-4817-9ba8-23212cd71b14-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 02:34:46 crc kubenswrapper[4665]: I1205 02:34:46.288664 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d4139ac-ffd2-4817-9ba8-23212cd71b14-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 02:34:46 crc kubenswrapper[4665]: I1205 02:34:46.877782 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerStarted","Data":"53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec"} Dec 05 02:34:46 crc kubenswrapper[4665]: I1205 02:34:46.880975 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-t6st2" event={"ID":"1d4139ac-ffd2-4817-9ba8-23212cd71b14","Type":"ContainerDied","Data":"ac812dc69050eac96b255d56290bcdee3e22f71f573223ebf3ef172107c20883"} Dec 05 02:34:46 crc kubenswrapper[4665]: I1205 02:34:46.881019 4665 scope.go:117] "RemoveContainer" containerID="a33cb686377178c046df4cf34ef52c1cb17d0ed9a613ae9248a19fb467e29c59" Dec 05 02:34:46 crc kubenswrapper[4665]: I1205 02:34:46.881102 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-t6st2" Dec 05 02:34:46 crc kubenswrapper[4665]: I1205 02:34:46.932004 4665 scope.go:117] "RemoveContainer" containerID="a4c1b8b91c5b5b3458ad3d0c8e4f3d0e0df9e062859843e26c514a552e07bf44" Dec 05 02:34:46 crc kubenswrapper[4665]: I1205 02:34:46.949501 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-t6st2"] Dec 05 02:34:46 crc kubenswrapper[4665]: I1205 02:34:46.959729 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-t6st2"] Dec 05 02:34:47 crc kubenswrapper[4665]: I1205 02:34:47.022368 4665 scope.go:117] "RemoveContainer" containerID="32e81ccec7beb22dfeaae1124c9c4a77205f07bb9734c296dcab5e4a594fc932" Dec 05 02:34:48 crc kubenswrapper[4665]: I1205 02:34:48.902539 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d4139ac-ffd2-4817-9ba8-23212cd71b14" path="/var/lib/kubelet/pods/1d4139ac-ffd2-4817-9ba8-23212cd71b14/volumes" Dec 05 02:35:03 crc kubenswrapper[4665]: I1205 02:35:03.028148 4665 generic.go:334] "Generic (PLEG): container finished" podID="f53d9f7c-fcef-4fb8-9196-50014c5b6285" containerID="a468fca5ca066ffc73a94a7295c021fdae79c083f84b9a76a507c1c3c9ddd5a9" exitCode=0 Dec 05 02:35:03 crc kubenswrapper[4665]: I1205 02:35:03.028240 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gjkrl/crc-debug-2f5dt" event={"ID":"f53d9f7c-fcef-4fb8-9196-50014c5b6285","Type":"ContainerDied","Data":"a468fca5ca066ffc73a94a7295c021fdae79c083f84b9a76a507c1c3c9ddd5a9"} Dec 05 02:35:04 crc kubenswrapper[4665]: I1205 02:35:04.161785 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gjkrl/crc-debug-2f5dt" Dec 05 02:35:04 crc kubenswrapper[4665]: I1205 02:35:04.202280 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-gjkrl/crc-debug-2f5dt"] Dec 05 02:35:04 crc kubenswrapper[4665]: I1205 02:35:04.210045 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-gjkrl/crc-debug-2f5dt"] Dec 05 02:35:04 crc kubenswrapper[4665]: I1205 02:35:04.362733 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f53d9f7c-fcef-4fb8-9196-50014c5b6285-host\") pod \"f53d9f7c-fcef-4fb8-9196-50014c5b6285\" (UID: \"f53d9f7c-fcef-4fb8-9196-50014c5b6285\") " Dec 05 02:35:04 crc kubenswrapper[4665]: I1205 02:35:04.362874 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f53d9f7c-fcef-4fb8-9196-50014c5b6285-host" (OuterVolumeSpecName: "host") pod "f53d9f7c-fcef-4fb8-9196-50014c5b6285" (UID: "f53d9f7c-fcef-4fb8-9196-50014c5b6285"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 02:35:04 crc kubenswrapper[4665]: I1205 02:35:04.363257 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f7cqt\" (UniqueName: \"kubernetes.io/projected/f53d9f7c-fcef-4fb8-9196-50014c5b6285-kube-api-access-f7cqt\") pod \"f53d9f7c-fcef-4fb8-9196-50014c5b6285\" (UID: \"f53d9f7c-fcef-4fb8-9196-50014c5b6285\") " Dec 05 02:35:04 crc kubenswrapper[4665]: I1205 02:35:04.363742 4665 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/f53d9f7c-fcef-4fb8-9196-50014c5b6285-host\") on node \"crc\" DevicePath \"\"" Dec 05 02:35:04 crc kubenswrapper[4665]: I1205 02:35:04.373678 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f53d9f7c-fcef-4fb8-9196-50014c5b6285-kube-api-access-f7cqt" (OuterVolumeSpecName: "kube-api-access-f7cqt") pod "f53d9f7c-fcef-4fb8-9196-50014c5b6285" (UID: "f53d9f7c-fcef-4fb8-9196-50014c5b6285"). InnerVolumeSpecName "kube-api-access-f7cqt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:35:04 crc kubenswrapper[4665]: I1205 02:35:04.465599 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f7cqt\" (UniqueName: \"kubernetes.io/projected/f53d9f7c-fcef-4fb8-9196-50014c5b6285-kube-api-access-f7cqt\") on node \"crc\" DevicePath \"\"" Dec 05 02:35:04 crc kubenswrapper[4665]: I1205 02:35:04.904619 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f53d9f7c-fcef-4fb8-9196-50014c5b6285" path="/var/lib/kubelet/pods/f53d9f7c-fcef-4fb8-9196-50014c5b6285/volumes" Dec 05 02:35:05 crc kubenswrapper[4665]: I1205 02:35:05.051680 4665 scope.go:117] "RemoveContainer" containerID="a468fca5ca066ffc73a94a7295c021fdae79c083f84b9a76a507c1c3c9ddd5a9" Dec 05 02:35:05 crc kubenswrapper[4665]: I1205 02:35:05.051751 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gjkrl/crc-debug-2f5dt" Dec 05 02:35:05 crc kubenswrapper[4665]: I1205 02:35:05.390811 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-gjkrl/crc-debug-zkr48"] Dec 05 02:35:05 crc kubenswrapper[4665]: E1205 02:35:05.392812 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d4139ac-ffd2-4817-9ba8-23212cd71b14" containerName="registry-server" Dec 05 02:35:05 crc kubenswrapper[4665]: I1205 02:35:05.392905 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d4139ac-ffd2-4817-9ba8-23212cd71b14" containerName="registry-server" Dec 05 02:35:05 crc kubenswrapper[4665]: E1205 02:35:05.392990 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d4139ac-ffd2-4817-9ba8-23212cd71b14" containerName="extract-content" Dec 05 02:35:05 crc kubenswrapper[4665]: I1205 02:35:05.393046 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d4139ac-ffd2-4817-9ba8-23212cd71b14" containerName="extract-content" Dec 05 02:35:05 crc kubenswrapper[4665]: E1205 02:35:05.393109 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f53d9f7c-fcef-4fb8-9196-50014c5b6285" containerName="container-00" Dec 05 02:35:05 crc kubenswrapper[4665]: I1205 02:35:05.393162 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="f53d9f7c-fcef-4fb8-9196-50014c5b6285" containerName="container-00" Dec 05 02:35:05 crc kubenswrapper[4665]: E1205 02:35:05.393218 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1d4139ac-ffd2-4817-9ba8-23212cd71b14" containerName="extract-utilities" Dec 05 02:35:05 crc kubenswrapper[4665]: I1205 02:35:05.393271 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="1d4139ac-ffd2-4817-9ba8-23212cd71b14" containerName="extract-utilities" Dec 05 02:35:05 crc kubenswrapper[4665]: I1205 02:35:05.393531 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="1d4139ac-ffd2-4817-9ba8-23212cd71b14" containerName="registry-server" Dec 05 02:35:05 crc kubenswrapper[4665]: I1205 02:35:05.393597 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="f53d9f7c-fcef-4fb8-9196-50014c5b6285" containerName="container-00" Dec 05 02:35:05 crc kubenswrapper[4665]: I1205 02:35:05.394345 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gjkrl/crc-debug-zkr48" Dec 05 02:35:05 crc kubenswrapper[4665]: I1205 02:35:05.396903 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-gjkrl"/"default-dockercfg-ksv9r" Dec 05 02:35:05 crc kubenswrapper[4665]: I1205 02:35:05.585274 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469-host\") pod \"crc-debug-zkr48\" (UID: \"3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469\") " pod="openshift-must-gather-gjkrl/crc-debug-zkr48" Dec 05 02:35:05 crc kubenswrapper[4665]: I1205 02:35:05.585492 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25pfq\" (UniqueName: \"kubernetes.io/projected/3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469-kube-api-access-25pfq\") pod \"crc-debug-zkr48\" (UID: \"3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469\") " pod="openshift-must-gather-gjkrl/crc-debug-zkr48" Dec 05 02:35:05 crc kubenswrapper[4665]: I1205 02:35:05.688716 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25pfq\" (UniqueName: \"kubernetes.io/projected/3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469-kube-api-access-25pfq\") pod \"crc-debug-zkr48\" (UID: \"3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469\") " pod="openshift-must-gather-gjkrl/crc-debug-zkr48" Dec 05 02:35:05 crc kubenswrapper[4665]: I1205 02:35:05.689126 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469-host\") pod \"crc-debug-zkr48\" (UID: \"3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469\") " pod="openshift-must-gather-gjkrl/crc-debug-zkr48" Dec 05 02:35:05 crc kubenswrapper[4665]: I1205 02:35:05.689243 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469-host\") pod \"crc-debug-zkr48\" (UID: \"3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469\") " pod="openshift-must-gather-gjkrl/crc-debug-zkr48" Dec 05 02:35:05 crc kubenswrapper[4665]: I1205 02:35:05.717139 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25pfq\" (UniqueName: \"kubernetes.io/projected/3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469-kube-api-access-25pfq\") pod \"crc-debug-zkr48\" (UID: \"3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469\") " pod="openshift-must-gather-gjkrl/crc-debug-zkr48" Dec 05 02:35:06 crc kubenswrapper[4665]: I1205 02:35:06.012850 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gjkrl/crc-debug-zkr48" Dec 05 02:35:06 crc kubenswrapper[4665]: I1205 02:35:06.065701 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gjkrl/crc-debug-zkr48" event={"ID":"3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469","Type":"ContainerStarted","Data":"f91f827f3234f58a0bc397a5a93594f85e284ffefaefa8ae9cb271f4a8e9551c"} Dec 05 02:35:07 crc kubenswrapper[4665]: I1205 02:35:07.075056 4665 generic.go:334] "Generic (PLEG): container finished" podID="3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469" containerID="1201ffe50d7b5cfaa5a4c3d97b44ac33e5451e5091546ecb4868181155a2dbca" exitCode=0 Dec 05 02:35:07 crc kubenswrapper[4665]: I1205 02:35:07.075215 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gjkrl/crc-debug-zkr48" event={"ID":"3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469","Type":"ContainerDied","Data":"1201ffe50d7b5cfaa5a4c3d97b44ac33e5451e5091546ecb4868181155a2dbca"} Dec 05 02:35:08 crc kubenswrapper[4665]: I1205 02:35:08.171202 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-gjkrl/crc-debug-zkr48" Dec 05 02:35:08 crc kubenswrapper[4665]: I1205 02:35:08.245843 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25pfq\" (UniqueName: \"kubernetes.io/projected/3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469-kube-api-access-25pfq\") pod \"3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469\" (UID: \"3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469\") " Dec 05 02:35:08 crc kubenswrapper[4665]: I1205 02:35:08.246282 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469-host\") pod \"3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469\" (UID: \"3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469\") " Dec 05 02:35:08 crc kubenswrapper[4665]: I1205 02:35:08.246369 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469-host" (OuterVolumeSpecName: "host") pod "3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469" (UID: "3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 02:35:08 crc kubenswrapper[4665]: I1205 02:35:08.246866 4665 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469-host\") on node \"crc\" DevicePath \"\"" Dec 05 02:35:08 crc kubenswrapper[4665]: I1205 02:35:08.271527 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469-kube-api-access-25pfq" (OuterVolumeSpecName: "kube-api-access-25pfq") pod "3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469" (UID: "3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469"). InnerVolumeSpecName "kube-api-access-25pfq". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:35:08 crc kubenswrapper[4665]: I1205 02:35:08.348646 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25pfq\" (UniqueName: \"kubernetes.io/projected/3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469-kube-api-access-25pfq\") on node \"crc\" DevicePath \"\"" Dec 05 02:35:09 crc kubenswrapper[4665]: I1205 02:35:09.097975 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gjkrl/crc-debug-zkr48" event={"ID":"3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469","Type":"ContainerDied","Data":"f91f827f3234f58a0bc397a5a93594f85e284ffefaefa8ae9cb271f4a8e9551c"} Dec 05 02:35:09 crc kubenswrapper[4665]: I1205 02:35:09.098496 4665 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f91f827f3234f58a0bc397a5a93594f85e284ffefaefa8ae9cb271f4a8e9551c" Dec 05 02:35:09 crc kubenswrapper[4665]: I1205 02:35:09.098057 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-gjkrl/crc-debug-zkr48" Dec 05 02:35:09 crc kubenswrapper[4665]: I1205 02:35:09.238448 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-gjkrl/crc-debug-zkr48"] Dec 05 02:35:09 crc kubenswrapper[4665]: I1205 02:35:09.247459 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-gjkrl/crc-debug-zkr48"] Dec 05 02:35:10 crc kubenswrapper[4665]: I1205 02:35:10.405814 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-gjkrl/crc-debug-mxqgp"] Dec 05 02:35:10 crc kubenswrapper[4665]: E1205 02:35:10.415095 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469" containerName="container-00" Dec 05 02:35:10 crc kubenswrapper[4665]: I1205 02:35:10.415141 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469" containerName="container-00" Dec 05 02:35:10 crc kubenswrapper[4665]: I1205 02:35:10.415485 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469" containerName="container-00" Dec 05 02:35:10 crc kubenswrapper[4665]: I1205 02:35:10.416465 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gjkrl/crc-debug-mxqgp" Dec 05 02:35:10 crc kubenswrapper[4665]: I1205 02:35:10.419105 4665 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-gjkrl"/"default-dockercfg-ksv9r" Dec 05 02:35:10 crc kubenswrapper[4665]: I1205 02:35:10.583695 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f4zgp\" (UniqueName: \"kubernetes.io/projected/c68a8f8c-3cbb-44e8-9d8e-f40a661068d2-kube-api-access-f4zgp\") pod \"crc-debug-mxqgp\" (UID: \"c68a8f8c-3cbb-44e8-9d8e-f40a661068d2\") " pod="openshift-must-gather-gjkrl/crc-debug-mxqgp" Dec 05 02:35:10 crc kubenswrapper[4665]: I1205 02:35:10.584006 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c68a8f8c-3cbb-44e8-9d8e-f40a661068d2-host\") pod \"crc-debug-mxqgp\" (UID: \"c68a8f8c-3cbb-44e8-9d8e-f40a661068d2\") " pod="openshift-must-gather-gjkrl/crc-debug-mxqgp" Dec 05 02:35:10 crc kubenswrapper[4665]: I1205 02:35:10.686114 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f4zgp\" (UniqueName: \"kubernetes.io/projected/c68a8f8c-3cbb-44e8-9d8e-f40a661068d2-kube-api-access-f4zgp\") pod \"crc-debug-mxqgp\" (UID: \"c68a8f8c-3cbb-44e8-9d8e-f40a661068d2\") " pod="openshift-must-gather-gjkrl/crc-debug-mxqgp" Dec 05 02:35:10 crc kubenswrapper[4665]: I1205 02:35:10.686492 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c68a8f8c-3cbb-44e8-9d8e-f40a661068d2-host\") pod \"crc-debug-mxqgp\" (UID: \"c68a8f8c-3cbb-44e8-9d8e-f40a661068d2\") " pod="openshift-must-gather-gjkrl/crc-debug-mxqgp" Dec 05 02:35:10 crc kubenswrapper[4665]: I1205 02:35:10.686605 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c68a8f8c-3cbb-44e8-9d8e-f40a661068d2-host\") pod \"crc-debug-mxqgp\" (UID: \"c68a8f8c-3cbb-44e8-9d8e-f40a661068d2\") " pod="openshift-must-gather-gjkrl/crc-debug-mxqgp" Dec 05 02:35:10 crc kubenswrapper[4665]: I1205 02:35:10.706941 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f4zgp\" (UniqueName: \"kubernetes.io/projected/c68a8f8c-3cbb-44e8-9d8e-f40a661068d2-kube-api-access-f4zgp\") pod \"crc-debug-mxqgp\" (UID: \"c68a8f8c-3cbb-44e8-9d8e-f40a661068d2\") " pod="openshift-must-gather-gjkrl/crc-debug-mxqgp" Dec 05 02:35:10 crc kubenswrapper[4665]: I1205 02:35:10.740250 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-gjkrl/crc-debug-mxqgp" Dec 05 02:35:10 crc kubenswrapper[4665]: W1205 02:35:10.779133 4665 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc68a8f8c_3cbb_44e8_9d8e_f40a661068d2.slice/crio-cb0a301539b76461b97d471a74c2a7c3b6f9bf3fc1f9ff0a3305e8bf07ef32d9 WatchSource:0}: Error finding container cb0a301539b76461b97d471a74c2a7c3b6f9bf3fc1f9ff0a3305e8bf07ef32d9: Status 404 returned error can't find the container with id cb0a301539b76461b97d471a74c2a7c3b6f9bf3fc1f9ff0a3305e8bf07ef32d9 Dec 05 02:35:10 crc kubenswrapper[4665]: I1205 02:35:10.903124 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469" path="/var/lib/kubelet/pods/3cb9892c-ea4a-4bb9-b78b-bc75b9bdf469/volumes" Dec 05 02:35:11 crc kubenswrapper[4665]: I1205 02:35:11.137641 4665 generic.go:334] "Generic (PLEG): container finished" podID="c68a8f8c-3cbb-44e8-9d8e-f40a661068d2" containerID="9b7af96cc7b029bc8ec09006588f7021aed1837b0a174d18fbaf40e7afc5d126" exitCode=0 Dec 05 02:35:11 crc kubenswrapper[4665]: I1205 02:35:11.137726 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gjkrl/crc-debug-mxqgp" event={"ID":"c68a8f8c-3cbb-44e8-9d8e-f40a661068d2","Type":"ContainerDied","Data":"9b7af96cc7b029bc8ec09006588f7021aed1837b0a174d18fbaf40e7afc5d126"} Dec 05 02:35:11 crc kubenswrapper[4665]: I1205 02:35:11.137952 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gjkrl/crc-debug-mxqgp" event={"ID":"c68a8f8c-3cbb-44e8-9d8e-f40a661068d2","Type":"ContainerStarted","Data":"cb0a301539b76461b97d471a74c2a7c3b6f9bf3fc1f9ff0a3305e8bf07ef32d9"} Dec 05 02:35:11 crc kubenswrapper[4665]: I1205 02:35:11.179865 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-gjkrl/crc-debug-mxqgp"] Dec 05 02:35:11 crc kubenswrapper[4665]: I1205 02:35:11.191898 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-gjkrl/crc-debug-mxqgp"] Dec 05 02:35:12 crc kubenswrapper[4665]: I1205 02:35:12.246154 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-gjkrl/crc-debug-mxqgp" Dec 05 02:35:12 crc kubenswrapper[4665]: I1205 02:35:12.431408 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-f4zgp\" (UniqueName: \"kubernetes.io/projected/c68a8f8c-3cbb-44e8-9d8e-f40a661068d2-kube-api-access-f4zgp\") pod \"c68a8f8c-3cbb-44e8-9d8e-f40a661068d2\" (UID: \"c68a8f8c-3cbb-44e8-9d8e-f40a661068d2\") " Dec 05 02:35:12 crc kubenswrapper[4665]: I1205 02:35:12.431487 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c68a8f8c-3cbb-44e8-9d8e-f40a661068d2-host\") pod \"c68a8f8c-3cbb-44e8-9d8e-f40a661068d2\" (UID: \"c68a8f8c-3cbb-44e8-9d8e-f40a661068d2\") " Dec 05 02:35:12 crc kubenswrapper[4665]: I1205 02:35:12.431597 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c68a8f8c-3cbb-44e8-9d8e-f40a661068d2-host" (OuterVolumeSpecName: "host") pod "c68a8f8c-3cbb-44e8-9d8e-f40a661068d2" (UID: "c68a8f8c-3cbb-44e8-9d8e-f40a661068d2"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 02:35:12 crc kubenswrapper[4665]: I1205 02:35:12.432212 4665 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/c68a8f8c-3cbb-44e8-9d8e-f40a661068d2-host\") on node \"crc\" DevicePath \"\"" Dec 05 02:35:12 crc kubenswrapper[4665]: I1205 02:35:12.437021 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c68a8f8c-3cbb-44e8-9d8e-f40a661068d2-kube-api-access-f4zgp" (OuterVolumeSpecName: "kube-api-access-f4zgp") pod "c68a8f8c-3cbb-44e8-9d8e-f40a661068d2" (UID: "c68a8f8c-3cbb-44e8-9d8e-f40a661068d2"). InnerVolumeSpecName "kube-api-access-f4zgp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:35:12 crc kubenswrapper[4665]: I1205 02:35:12.534312 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-f4zgp\" (UniqueName: \"kubernetes.io/projected/c68a8f8c-3cbb-44e8-9d8e-f40a661068d2-kube-api-access-f4zgp\") on node \"crc\" DevicePath \"\"" Dec 05 02:35:12 crc kubenswrapper[4665]: I1205 02:35:12.905081 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c68a8f8c-3cbb-44e8-9d8e-f40a661068d2" path="/var/lib/kubelet/pods/c68a8f8c-3cbb-44e8-9d8e-f40a661068d2/volumes" Dec 05 02:35:13 crc kubenswrapper[4665]: I1205 02:35:13.155244 4665 scope.go:117] "RemoveContainer" containerID="9b7af96cc7b029bc8ec09006588f7021aed1837b0a174d18fbaf40e7afc5d126" Dec 05 02:35:13 crc kubenswrapper[4665]: I1205 02:35:13.155360 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-gjkrl/crc-debug-mxqgp" Dec 05 02:35:49 crc kubenswrapper[4665]: I1205 02:35:49.819639 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6f55bbcd96-6gf9l_db4c2bcc-14d3-4129-89d9-e25d6c01ef02/barbican-api/0.log" Dec 05 02:35:49 crc kubenswrapper[4665]: I1205 02:35:49.945820 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-6f55bbcd96-6gf9l_db4c2bcc-14d3-4129-89d9-e25d6c01ef02/barbican-api-log/0.log" Dec 05 02:35:50 crc kubenswrapper[4665]: I1205 02:35:50.142785 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-d6596b4bb-7zqjr_6cf44f27-5007-4db9-8784-715bdef486a0/barbican-keystone-listener/0.log" Dec 05 02:35:50 crc kubenswrapper[4665]: I1205 02:35:50.181032 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-d6596b4bb-7zqjr_6cf44f27-5007-4db9-8784-715bdef486a0/barbican-keystone-listener-log/0.log" Dec 05 02:35:50 crc kubenswrapper[4665]: I1205 02:35:50.326799 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5bf7b484d7-782wq_9dd95469-e581-46aa-bbb2-c69214aa26c7/barbican-worker/0.log" Dec 05 02:35:50 crc kubenswrapper[4665]: I1205 02:35:50.416688 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-5bf7b484d7-782wq_9dd95469-e581-46aa-bbb2-c69214aa26c7/barbican-worker-log/0.log" Dec 05 02:35:50 crc kubenswrapper[4665]: I1205 02:35:50.618507 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-trxmg_dd675614-41e7-40e1-b09d-639e6ed250fb/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:35:50 crc kubenswrapper[4665]: I1205 02:35:50.724020 4665 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ceilometer-0_94d0e25f-bdca-4da9-80c5-b81bedbdd7cc/ceilometer-central-agent/0.log" Dec 05 02:35:50 crc kubenswrapper[4665]: I1205 02:35:50.748383 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_94d0e25f-bdca-4da9-80c5-b81bedbdd7cc/ceilometer-notification-agent/0.log" Dec 05 02:35:50 crc kubenswrapper[4665]: I1205 02:35:50.894875 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_94d0e25f-bdca-4da9-80c5-b81bedbdd7cc/proxy-httpd/0.log" Dec 05 02:35:50 crc kubenswrapper[4665]: I1205 02:35:50.956060 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_94d0e25f-bdca-4da9-80c5-b81bedbdd7cc/sg-core/0.log" Dec 05 02:35:51 crc kubenswrapper[4665]: I1205 02:35:51.058343 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_84d298f5-1785-45d9-8195-ae1ba82c398a/cinder-api/0.log" Dec 05 02:35:51 crc kubenswrapper[4665]: I1205 02:35:51.187826 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_84d298f5-1785-45d9-8195-ae1ba82c398a/cinder-api-log/0.log" Dec 05 02:35:51 crc kubenswrapper[4665]: I1205 02:35:51.276400 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_5d1a4169-4d66-47db-a16d-c3f77df4334a/cinder-scheduler/0.log" Dec 05 02:35:51 crc kubenswrapper[4665]: I1205 02:35:51.352028 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_5d1a4169-4d66-47db-a16d-c3f77df4334a/probe/0.log" Dec 05 02:35:51 crc kubenswrapper[4665]: I1205 02:35:51.497329 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-d8cqr_16df9956-9395-4412-92c6-9635bf23c681/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:35:51 crc kubenswrapper[4665]: I1205 02:35:51.664741 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-c6ws8_f6b83f4a-6910-44b3-9fca-b9b455cc3d97/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:35:51 crc kubenswrapper[4665]: I1205 02:35:51.726523 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-54ffdb7d8c-2jtfc_ce335393-a026-4267-b337-ca077b2461b8/init/0.log" Dec 05 02:35:51 crc kubenswrapper[4665]: I1205 02:35:51.968636 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-54ffdb7d8c-2jtfc_ce335393-a026-4267-b337-ca077b2461b8/init/0.log" Dec 05 02:35:52 crc kubenswrapper[4665]: I1205 02:35:52.017667 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-7mx69_a185d71d-c81e-4faf-8c7a-c31c2ee82f31/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:35:52 crc kubenswrapper[4665]: I1205 02:35:52.150412 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-54ffdb7d8c-2jtfc_ce335393-a026-4267-b337-ca077b2461b8/dnsmasq-dns/0.log" Dec 05 02:35:52 crc kubenswrapper[4665]: I1205 02:35:52.278745 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9/glance-httpd/0.log" Dec 05 02:35:52 crc kubenswrapper[4665]: I1205 02:35:52.304984 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_75bbfe87-57cb-41e4-a9b0-fcb657c9b2a9/glance-log/0.log" Dec 05 
02:35:52 crc kubenswrapper[4665]: I1205 02:35:52.531204 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_37c48d2d-df08-4684-8215-be918d17cdae/glance-httpd/0.log" Dec 05 02:35:52 crc kubenswrapper[4665]: I1205 02:35:52.616182 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_37c48d2d-df08-4684-8215-be918d17cdae/glance-log/0.log" Dec 05 02:35:52 crc kubenswrapper[4665]: I1205 02:35:52.788468 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-644f785f4-mslbg_64407a72-3fdf-450f-b5c0-913ee74bb437/horizon/1.log" Dec 05 02:35:52 crc kubenswrapper[4665]: I1205 02:35:52.875758 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-644f785f4-mslbg_64407a72-3fdf-450f-b5c0-913ee74bb437/horizon/0.log" Dec 05 02:35:53 crc kubenswrapper[4665]: I1205 02:35:53.198766 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-9ckgd_caadc290-7caf-4c1f-8a2e-4c2b275e572b/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:35:53 crc kubenswrapper[4665]: I1205 02:35:53.365191 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-644f785f4-mslbg_64407a72-3fdf-450f-b5c0-913ee74bb437/horizon-log/0.log" Dec 05 02:35:53 crc kubenswrapper[4665]: I1205 02:35:53.457135 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-5jtmp_3966caf7-b7a8-4bd6-b4e3-e8ccbc5fce3d/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:35:53 crc kubenswrapper[4665]: I1205 02:35:53.825489 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29415001-s26ld_01aa0f4b-fd48-4f4e-985a-93c82ef5c3aa/keystone-cron/0.log" Dec 05 02:35:53 crc kubenswrapper[4665]: I1205 02:35:53.915496 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_f6efb211-6c0c-44d0-b0f9-ab5a529b4c4c/kube-state-metrics/0.log" Dec 05 02:35:53 crc kubenswrapper[4665]: I1205 02:35:53.930452 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-79f86c7bd7-c4mss_4d1f4f51-e293-418f-a305-a7699a6cb866/keystone-api/0.log" Dec 05 02:35:54 crc kubenswrapper[4665]: I1205 02:35:54.176749 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-cfcbd_386e08fe-2108-4139-af9d-94fbaa7b7b12/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:35:54 crc kubenswrapper[4665]: I1205 02:35:54.688104 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-srbsl_76df2ad8-3e10-41ac-aa00-bea04feee0b9/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:35:54 crc kubenswrapper[4665]: I1205 02:35:54.916962 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5b484f7985-8qkjq_c6a6180f-d384-4015-8bf3-6563123c2f6a/neutron-httpd/0.log" Dec 05 02:35:55 crc kubenswrapper[4665]: I1205 02:35:55.232233 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-5b484f7985-8qkjq_c6a6180f-d384-4015-8bf3-6563123c2f6a/neutron-api/0.log" Dec 05 02:35:56 crc kubenswrapper[4665]: I1205 02:35:56.050481 4665 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-cell0-conductor-0_fe074ce6-2f02-47f2-9e94-5a910517f64d/nova-cell0-conductor-conductor/0.log" Dec 05 02:35:56 crc kubenswrapper[4665]: I1205 02:35:56.542265 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_632d0371-eca3-499e-8993-07e8025175d3/nova-cell1-conductor-conductor/0.log" Dec 05 02:35:56 crc kubenswrapper[4665]: I1205 02:35:56.780752 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_661570e0-6f0d-4fd7-8368-b4713af3da59/nova-api-api/0.log" Dec 05 02:35:56 crc kubenswrapper[4665]: I1205 02:35:56.808268 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_661570e0-6f0d-4fd7-8368-b4713af3da59/nova-api-log/0.log" Dec 05 02:35:57 crc kubenswrapper[4665]: I1205 02:35:57.323833 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_a32e8029-fd7e-4662-8fbe-b83deaea60c8/nova-cell1-novncproxy-novncproxy/0.log" Dec 05 02:35:57 crc kubenswrapper[4665]: I1205 02:35:57.328663 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-85px8_31efb8b4-c179-4d28-b197-2803bef0c22e/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:35:57 crc kubenswrapper[4665]: I1205 02:35:57.592499 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_40f37057-0a9e-40c6-9b67-776cd9b19e54/nova-metadata-log/0.log" Dec 05 02:35:57 crc kubenswrapper[4665]: I1205 02:35:57.983733 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6/mysql-bootstrap/0.log" Dec 05 02:35:58 crc kubenswrapper[4665]: I1205 02:35:58.212680 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6/galera/0.log" Dec 05 02:35:58 crc kubenswrapper[4665]: I1205 02:35:58.213980 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_1c30f5a4-7646-4d9d-8ff9-17f6ffc0f6b6/mysql-bootstrap/0.log" Dec 05 02:35:58 crc kubenswrapper[4665]: I1205 02:35:58.302651 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_165ee87a-9845-4f0c-b291-9d5fe6a1bdc5/nova-scheduler-scheduler/0.log" Dec 05 02:35:58 crc kubenswrapper[4665]: I1205 02:35:58.510405 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_423b314f-ea29-4705-8447-3a316edd8c6b/mysql-bootstrap/0.log" Dec 05 02:35:58 crc kubenswrapper[4665]: I1205 02:35:58.903051 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_423b314f-ea29-4705-8447-3a316edd8c6b/mysql-bootstrap/0.log" Dec 05 02:35:58 crc kubenswrapper[4665]: I1205 02:35:58.925450 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_423b314f-ea29-4705-8447-3a316edd8c6b/galera/0.log" Dec 05 02:35:59 crc kubenswrapper[4665]: I1205 02:35:59.058430 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_a8461ae3-f75f-42de-b320-c9dc4b1545ec/openstackclient/0.log" Dec 05 02:35:59 crc kubenswrapper[4665]: I1205 02:35:59.252521 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-ts2h8_e65a93ed-16e2-4cf7-a295-a3517e553335/openstack-network-exporter/0.log" Dec 05 02:35:59 crc kubenswrapper[4665]: I1205 02:35:59.525693 4665 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7xcgj_ca9ae867-4d7f-4a30-ab90-ba5113fb9029/ovsdb-server-init/0.log" Dec 05 02:35:59 crc kubenswrapper[4665]: I1205 02:35:59.756518 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_40f37057-0a9e-40c6-9b67-776cd9b19e54/nova-metadata-metadata/0.log" Dec 05 02:35:59 crc kubenswrapper[4665]: I1205 02:35:59.765425 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7xcgj_ca9ae867-4d7f-4a30-ab90-ba5113fb9029/ovsdb-server-init/0.log" Dec 05 02:35:59 crc kubenswrapper[4665]: I1205 02:35:59.900606 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7xcgj_ca9ae867-4d7f-4a30-ab90-ba5113fb9029/ovsdb-server/0.log" Dec 05 02:35:59 crc kubenswrapper[4665]: I1205 02:35:59.934842 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-7xcgj_ca9ae867-4d7f-4a30-ab90-ba5113fb9029/ovs-vswitchd/0.log" Dec 05 02:36:00 crc kubenswrapper[4665]: I1205 02:36:00.105082 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-zr2m2_41de0d44-b33f-43c4-a6c1-54830596874b/ovn-controller/0.log" Dec 05 02:36:00 crc kubenswrapper[4665]: I1205 02:36:00.531584 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-dxlht_21b83a7a-3186-47f2-851a-b65efe2348a8/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:36:00 crc kubenswrapper[4665]: I1205 02:36:00.741197 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_acb7355b-b799-4b12-a8f1-75dd2439696f/openstack-network-exporter/0.log" Dec 05 02:36:00 crc kubenswrapper[4665]: I1205 02:36:00.831221 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_acb7355b-b799-4b12-a8f1-75dd2439696f/ovn-northd/0.log" Dec 05 02:36:01 crc kubenswrapper[4665]: I1205 02:36:01.057218 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_df5300cc-4ce3-4574-a775-595607aeddb6/openstack-network-exporter/0.log" Dec 05 02:36:01 crc kubenswrapper[4665]: I1205 02:36:01.191792 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_df5300cc-4ce3-4574-a775-595607aeddb6/ovsdbserver-nb/0.log" Dec 05 02:36:01 crc kubenswrapper[4665]: I1205 02:36:01.336433 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_68f1be10-e014-492c-9fb2-f6131ee209d4/openstack-network-exporter/0.log" Dec 05 02:36:01 crc kubenswrapper[4665]: I1205 02:36:01.380878 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_68f1be10-e014-492c-9fb2-f6131ee209d4/ovsdbserver-sb/0.log" Dec 05 02:36:01 crc kubenswrapper[4665]: I1205 02:36:01.932256 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7c79d9c44-5ps46_52283875-2314-426a-b5ff-77a8b000f4cc/placement-api/0.log" Dec 05 02:36:01 crc kubenswrapper[4665]: I1205 02:36:01.944490 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b5dc1946-d77e-4106-9350-326f32a2ae55/setup-container/0.log" Dec 05 02:36:02 crc kubenswrapper[4665]: I1205 02:36:02.131113 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-7c79d9c44-5ps46_52283875-2314-426a-b5ff-77a8b000f4cc/placement-log/0.log" Dec 05 02:36:02 crc kubenswrapper[4665]: I1205 02:36:02.268732 4665 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b5dc1946-d77e-4106-9350-326f32a2ae55/rabbitmq/0.log" Dec 05 02:36:02 crc kubenswrapper[4665]: I1205 02:36:02.404581 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_b5dc1946-d77e-4106-9350-326f32a2ae55/setup-container/0.log" Dec 05 02:36:02 crc kubenswrapper[4665]: I1205 02:36:02.541791 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7a8135c5-ab50-4a2b-895a-7976da8b5bee/setup-container/0.log" Dec 05 02:36:02 crc kubenswrapper[4665]: I1205 02:36:02.786146 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7a8135c5-ab50-4a2b-895a-7976da8b5bee/rabbitmq/0.log" Dec 05 02:36:02 crc kubenswrapper[4665]: I1205 02:36:02.875672 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_7a8135c5-ab50-4a2b-895a-7976da8b5bee/setup-container/0.log" Dec 05 02:36:02 crc kubenswrapper[4665]: I1205 02:36:02.926934 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-nnx77_e27c2d20-292e-4f38-8fb9-8addf5cb5ebf/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:36:03 crc kubenswrapper[4665]: I1205 02:36:03.190523 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-zd7x2_f3777f3d-f5e6-479d-947b-baf234749487/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:36:03 crc kubenswrapper[4665]: I1205 02:36:03.197883 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-4wjpm_61298cb7-8b67-4f94-bd96-ee4ec8189d00/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:36:03 crc kubenswrapper[4665]: I1205 02:36:03.479321 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-8fk25_66505ea7-937f-4f07-b036-afca1adc368c/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:36:03 crc kubenswrapper[4665]: I1205 02:36:03.555655 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-nx4nq_0e47d679-dd01-454e-8ae4-887d6d676d15/ssh-known-hosts-edpm-deployment/0.log" Dec 05 02:36:03 crc kubenswrapper[4665]: I1205 02:36:03.834980 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5f4f69cd4c-jp87r_5cf42108-5fbb-4bb1-b941-b2181f99fa5c/proxy-server/0.log" Dec 05 02:36:03 crc kubenswrapper[4665]: I1205 02:36:03.997195 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5f4f69cd4c-jp87r_5cf42108-5fbb-4bb1-b941-b2181f99fa5c/proxy-httpd/0.log" Dec 05 02:36:04 crc kubenswrapper[4665]: I1205 02:36:04.243509 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-zlrjx_796fddd5-4127-4632-8728-406e29348c74/swift-ring-rebalance/0.log" Dec 05 02:36:04 crc kubenswrapper[4665]: I1205 02:36:04.611599 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/account-replicator/0.log" Dec 05 02:36:04 crc kubenswrapper[4665]: I1205 02:36:04.624892 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/account-reaper/0.log" Dec 05 02:36:04 crc kubenswrapper[4665]: I1205 02:36:04.625104 4665 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/account-auditor/0.log" Dec 05 02:36:04 crc kubenswrapper[4665]: I1205 02:36:04.863415 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/account-server/0.log" Dec 05 02:36:04 crc kubenswrapper[4665]: I1205 02:36:04.920076 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/container-auditor/0.log" Dec 05 02:36:04 crc kubenswrapper[4665]: I1205 02:36:04.985453 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/container-server/0.log" Dec 05 02:36:05 crc kubenswrapper[4665]: I1205 02:36:05.059665 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/container-replicator/0.log" Dec 05 02:36:05 crc kubenswrapper[4665]: I1205 02:36:05.087379 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/container-updater/0.log" Dec 05 02:36:05 crc kubenswrapper[4665]: I1205 02:36:05.256973 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/object-expirer/0.log" Dec 05 02:36:05 crc kubenswrapper[4665]: I1205 02:36:05.259630 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/object-auditor/0.log" Dec 05 02:36:05 crc kubenswrapper[4665]: I1205 02:36:05.387065 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/object-replicator/0.log" Dec 05 02:36:05 crc kubenswrapper[4665]: I1205 02:36:05.398240 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/object-server/0.log" Dec 05 02:36:05 crc kubenswrapper[4665]: I1205 02:36:05.815647 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/object-updater/0.log" Dec 05 02:36:05 crc kubenswrapper[4665]: I1205 02:36:05.883985 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/swift-recon-cron/0.log" Dec 05 02:36:05 crc kubenswrapper[4665]: I1205 02:36:05.899637 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_d2bb1abb-06b0-416c-94fb-6afcbb0c8c3f/rsync/0.log" Dec 05 02:36:06 crc kubenswrapper[4665]: I1205 02:36:06.258391 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-hrk76_820a4267-c307-42cb-96cb-482a2919cfe7/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:36:06 crc kubenswrapper[4665]: I1205 02:36:06.309934 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_a3dcea46-0cf1-441d-84ba-0b327c396844/tempest-tests-tempest-tests-runner/0.log" Dec 05 02:36:06 crc kubenswrapper[4665]: I1205 02:36:06.483063 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_cfdb58d0-875d-49d7-82ef-13bc4785a25c/test-operator-logs-container/0.log" Dec 05 02:36:06 crc kubenswrapper[4665]: I1205 02:36:06.523119 4665 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-d9qj6_61d15914-07c6-4782-b8e2-96ec816206fb/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 02:36:15 crc kubenswrapper[4665]: I1205 02:36:15.614655 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_02847117-544d-400b-b9a0-4d10200e0f0d/memcached/0.log" Dec 05 02:36:34 crc kubenswrapper[4665]: I1205 02:36:34.522526 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb_3fc63545-1530-48cd-a790-f36d5cd5d73c/util/0.log" Dec 05 02:36:34 crc kubenswrapper[4665]: I1205 02:36:34.712594 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb_3fc63545-1530-48cd-a790-f36d5cd5d73c/util/0.log" Dec 05 02:36:34 crc kubenswrapper[4665]: I1205 02:36:34.751544 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb_3fc63545-1530-48cd-a790-f36d5cd5d73c/pull/0.log" Dec 05 02:36:34 crc kubenswrapper[4665]: I1205 02:36:34.811288 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb_3fc63545-1530-48cd-a790-f36d5cd5d73c/pull/0.log" Dec 05 02:36:35 crc kubenswrapper[4665]: I1205 02:36:35.018350 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb_3fc63545-1530-48cd-a790-f36d5cd5d73c/util/0.log" Dec 05 02:36:35 crc kubenswrapper[4665]: I1205 02:36:35.081147 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb_3fc63545-1530-48cd-a790-f36d5cd5d73c/extract/0.log" Dec 05 02:36:35 crc kubenswrapper[4665]: I1205 02:36:35.107800 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_58c134150ba87862a072ee99906936458cca3557b039fbc67862da7cff9vhjb_3fc63545-1530-48cd-a790-f36d5cd5d73c/pull/0.log" Dec 05 02:36:35 crc kubenswrapper[4665]: I1205 02:36:35.482790 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-8hblt_fa7a4766-c028-4114-b979-a7900e21103c/kube-rbac-proxy/0.log" Dec 05 02:36:35 crc kubenswrapper[4665]: I1205 02:36:35.551130 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-8hblt_fa7a4766-c028-4114-b979-a7900e21103c/manager/0.log" Dec 05 02:36:35 crc kubenswrapper[4665]: I1205 02:36:35.659739 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-ptwdk_669a406d-6e51-4ead-89ff-4a1df7cb7308/kube-rbac-proxy/0.log" Dec 05 02:36:35 crc kubenswrapper[4665]: I1205 02:36:35.770540 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-ptwdk_669a406d-6e51-4ead-89ff-4a1df7cb7308/manager/0.log" Dec 05 02:36:35 crc kubenswrapper[4665]: I1205 02:36:35.788478 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-6bsqf_c8e9557a-3433-413b-a5b2-9137f2b9c584/kube-rbac-proxy/0.log" Dec 05 02:36:35 crc kubenswrapper[4665]: I1205 
02:36:35.875376 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-6bsqf_c8e9557a-3433-413b-a5b2-9137f2b9c584/manager/0.log" Dec 05 02:36:36 crc kubenswrapper[4665]: I1205 02:36:36.034615 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-ks65z_277c6945-0cbb-4a0a-8e22-e990d76da759/kube-rbac-proxy/0.log" Dec 05 02:36:36 crc kubenswrapper[4665]: I1205 02:36:36.070891 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-ks65z_277c6945-0cbb-4a0a-8e22-e990d76da759/manager/0.log" Dec 05 02:36:36 crc kubenswrapper[4665]: I1205 02:36:36.250159 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-dz2g4_93b5a195-1f87-4eb3-8511-916e652b3913/kube-rbac-proxy/0.log" Dec 05 02:36:36 crc kubenswrapper[4665]: I1205 02:36:36.280466 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-dz2g4_93b5a195-1f87-4eb3-8511-916e652b3913/manager/0.log" Dec 05 02:36:36 crc kubenswrapper[4665]: I1205 02:36:36.397748 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-q6zxf_8bd45d32-e93b-415b-a885-3926454418c9/kube-rbac-proxy/0.log" Dec 05 02:36:36 crc kubenswrapper[4665]: I1205 02:36:36.517568 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-q6zxf_8bd45d32-e93b-415b-a885-3926454418c9/manager/0.log" Dec 05 02:36:36 crc kubenswrapper[4665]: I1205 02:36:36.584792 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-lt2sv_68e17c62-dc97-4bf6-b9de-340e03d5cbda/kube-rbac-proxy/0.log" Dec 05 02:36:36 crc kubenswrapper[4665]: I1205 02:36:36.740040 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-9pbl9_3a85872e-618d-4847-aae0-1eb366f16003/kube-rbac-proxy/0.log" Dec 05 02:36:36 crc kubenswrapper[4665]: I1205 02:36:36.771558 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-57548d458d-lt2sv_68e17c62-dc97-4bf6-b9de-340e03d5cbda/manager/0.log" Dec 05 02:36:36 crc kubenswrapper[4665]: I1205 02:36:36.841399 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-9pbl9_3a85872e-618d-4847-aae0-1eb366f16003/manager/0.log" Dec 05 02:36:36 crc kubenswrapper[4665]: I1205 02:36:36.975462 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-nfmtb_5c1d9b52-7fcd-4615-9faa-af55e4165ffb/kube-rbac-proxy/0.log" Dec 05 02:36:37 crc kubenswrapper[4665]: I1205 02:36:37.071325 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-nfmtb_5c1d9b52-7fcd-4615-9faa-af55e4165ffb/manager/0.log" Dec 05 02:36:37 crc kubenswrapper[4665]: I1205 02:36:37.216436 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-dxk89_1fd940e7-8adc-4859-a763-58d909016fd6/manager/0.log" Dec 05 02:36:37 crc 
kubenswrapper[4665]: I1205 02:36:37.221977 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-dxk89_1fd940e7-8adc-4859-a763-58d909016fd6/kube-rbac-proxy/0.log" Dec 05 02:36:37 crc kubenswrapper[4665]: I1205 02:36:37.381111 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-5qmj8_e7235695-da27-4f1d-afec-a6f2a3decc79/kube-rbac-proxy/0.log" Dec 05 02:36:37 crc kubenswrapper[4665]: I1205 02:36:37.451696 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-5qmj8_e7235695-da27-4f1d-afec-a6f2a3decc79/manager/0.log" Dec 05 02:36:37 crc kubenswrapper[4665]: I1205 02:36:37.578453 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-fbmtc_8d94649e-ea57-4b1a-9fb0-2b37b567cd77/kube-rbac-proxy/0.log" Dec 05 02:36:37 crc kubenswrapper[4665]: I1205 02:36:37.792031 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-fgjvz_fb8b497b-5207-408c-9e30-e7169c4ccede/kube-rbac-proxy/0.log" Dec 05 02:36:37 crc kubenswrapper[4665]: I1205 02:36:37.936271 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-fbmtc_8d94649e-ea57-4b1a-9fb0-2b37b567cd77/manager/0.log" Dec 05 02:36:37 crc kubenswrapper[4665]: I1205 02:36:37.968116 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-fgjvz_fb8b497b-5207-408c-9e30-e7169c4ccede/manager/0.log" Dec 05 02:36:38 crc kubenswrapper[4665]: I1205 02:36:38.140109 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-dnqxs_764119ea-4228-4141-a7a7-faee0be8d052/kube-rbac-proxy/0.log" Dec 05 02:36:38 crc kubenswrapper[4665]: I1205 02:36:38.176582 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-dnqxs_764119ea-4228-4141-a7a7-faee0be8d052/manager/0.log" Dec 05 02:36:38 crc kubenswrapper[4665]: I1205 02:36:38.316980 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9_77dd89d3-29be-4d9c-ad35-a6281d6bd57f/kube-rbac-proxy/0.log" Dec 05 02:36:38 crc kubenswrapper[4665]: I1205 02:36:38.345555 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4j9zg9_77dd89d3-29be-4d9c-ad35-a6281d6bd57f/manager/0.log" Dec 05 02:36:38 crc kubenswrapper[4665]: I1205 02:36:38.834252 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-6f79d9dccc-zkrtn_119e3b58-5f63-441a-b2d9-9ea2e83df2b8/operator/0.log" Dec 05 02:36:38 crc kubenswrapper[4665]: I1205 02:36:38.875969 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-pwd7r_68ae69df-a135-4ab5-b79c-47268f37c17f/registry-server/0.log" Dec 05 02:36:39 crc kubenswrapper[4665]: I1205 02:36:39.117255 4665 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-nmpk2_c874c72f-9ac0-4ce5-bf5c-fc9e983b725c/kube-rbac-proxy/0.log" Dec 05 02:36:39 crc kubenswrapper[4665]: I1205 02:36:39.274310 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-nmpk2_c874c72f-9ac0-4ce5-bf5c-fc9e983b725c/manager/0.log" Dec 05 02:36:39 crc kubenswrapper[4665]: I1205 02:36:39.462179 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-fstjd_8543ed45-b6f4-4f54-bc94-756bf6f031e6/manager/0.log" Dec 05 02:36:39 crc kubenswrapper[4665]: I1205 02:36:39.523167 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-79966545b7-vd7h4_afada5e1-db62-40f7-b5a9-1c36f42670d4/manager/0.log" Dec 05 02:36:39 crc kubenswrapper[4665]: I1205 02:36:39.547233 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-fstjd_8543ed45-b6f4-4f54-bc94-756bf6f031e6/kube-rbac-proxy/0.log" Dec 05 02:36:39 crc kubenswrapper[4665]: I1205 02:36:39.611959 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-dmtqm_7bafbafa-4235-456c-a2b3-7990ad3f14e2/operator/0.log" Dec 05 02:36:39 crc kubenswrapper[4665]: I1205 02:36:39.774906 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-dlncm_3933c0fc-ca36-42a1-b418-9db281576617/kube-rbac-proxy/0.log" Dec 05 02:36:39 crc kubenswrapper[4665]: I1205 02:36:39.780137 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-dlncm_3933c0fc-ca36-42a1-b418-9db281576617/manager/0.log" Dec 05 02:36:39 crc kubenswrapper[4665]: I1205 02:36:39.821593 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-6dh9j_f353bd63-e193-4a26-8ba6-32f1eec034a8/kube-rbac-proxy/0.log" Dec 05 02:36:40 crc kubenswrapper[4665]: I1205 02:36:40.031238 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-bk8hv_0c289bf9-999d-4396-b15b-b27fded35180/kube-rbac-proxy/0.log" Dec 05 02:36:40 crc kubenswrapper[4665]: I1205 02:36:40.091121 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-bk8hv_0c289bf9-999d-4396-b15b-b27fded35180/manager/0.log" Dec 05 02:36:40 crc kubenswrapper[4665]: I1205 02:36:40.095381 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-6dh9j_f353bd63-e193-4a26-8ba6-32f1eec034a8/manager/0.log" Dec 05 02:36:40 crc kubenswrapper[4665]: I1205 02:36:40.252177 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-sbnx4_5009fa32-5a01-46dc-9238-2f3c8ef7fddc/manager/0.log" Dec 05 02:36:40 crc kubenswrapper[4665]: I1205 02:36:40.300093 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-sbnx4_5009fa32-5a01-46dc-9238-2f3c8ef7fddc/kube-rbac-proxy/0.log" Dec 05 02:36:59 crc kubenswrapper[4665]: I1205 02:36:59.782280 4665 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-k5gsl_f20fec7f-c7bb-4bb5-b86c-076b8931aa97/control-plane-machine-set-operator/0.log" Dec 05 02:36:59 crc kubenswrapper[4665]: I1205 02:36:59.953676 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-42qzk_cfa776fb-8b8d-4cd1-941a-57e2672afdeb/kube-rbac-proxy/0.log" Dec 05 02:37:00 crc kubenswrapper[4665]: I1205 02:37:00.025114 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-42qzk_cfa776fb-8b8d-4cd1-941a-57e2672afdeb/machine-api-operator/0.log" Dec 05 02:37:12 crc kubenswrapper[4665]: I1205 02:37:12.763629 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-hdlpm_5f586972-9546-4a85-a442-590c70b38de3/cert-manager-controller/0.log" Dec 05 02:37:12 crc kubenswrapper[4665]: I1205 02:37:12.902596 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-pv2mm_7b226a2a-8fb6-4694-a1a7-9a86e3d222e9/cert-manager-cainjector/0.log" Dec 05 02:37:13 crc kubenswrapper[4665]: I1205 02:37:13.033502 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-2czfg_7af985e3-148d-4974-b2ea-d9679063234a/cert-manager-webhook/0.log" Dec 05 02:37:14 crc kubenswrapper[4665]: I1205 02:37:14.922587 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 02:37:14 crc kubenswrapper[4665]: I1205 02:37:14.922889 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 02:37:24 crc kubenswrapper[4665]: I1205 02:37:24.986725 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-dp69l_0c556144-28be-4719-91ae-78d016ea7d7a/nmstate-console-plugin/0.log" Dec 05 02:37:25 crc kubenswrapper[4665]: I1205 02:37:25.163076 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-glrwt_faa11fdb-cf1b-48a0-87b0-d40430440a73/nmstate-handler/0.log" Dec 05 02:37:25 crc kubenswrapper[4665]: I1205 02:37:25.261998 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-hgsvm_0e886560-4686-45f7-b50d-c0544fc53448/kube-rbac-proxy/0.log" Dec 05 02:37:25 crc kubenswrapper[4665]: I1205 02:37:25.347352 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-hgsvm_0e886560-4686-45f7-b50d-c0544fc53448/nmstate-metrics/0.log" Dec 05 02:37:25 crc kubenswrapper[4665]: I1205 02:37:25.418679 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-7mmkk_1eb252f2-27b1-4cd9-be84-4183313f0710/nmstate-operator/0.log" Dec 05 02:37:25 crc kubenswrapper[4665]: I1205 02:37:25.590639 4665 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-jgwh9_e3ec97f0-b128-4247-aa81-c51298bd148c/nmstate-webhook/0.log" Dec 05 02:37:43 crc kubenswrapper[4665]: I1205 02:37:43.333166 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-dw2lj_b1652627-99c3-4670-84ec-c770bf76a4b4/kube-rbac-proxy/0.log" Dec 05 02:37:43 crc kubenswrapper[4665]: I1205 02:37:43.373964 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-dw2lj_b1652627-99c3-4670-84ec-c770bf76a4b4/controller/0.log" Dec 05 02:37:43 crc kubenswrapper[4665]: I1205 02:37:43.525397 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/cp-frr-files/0.log" Dec 05 02:37:44 crc kubenswrapper[4665]: I1205 02:37:44.079701 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/cp-reloader/0.log" Dec 05 02:37:44 crc kubenswrapper[4665]: I1205 02:37:44.080208 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/cp-frr-files/0.log" Dec 05 02:37:44 crc kubenswrapper[4665]: I1205 02:37:44.121880 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/cp-metrics/0.log" Dec 05 02:37:44 crc kubenswrapper[4665]: I1205 02:37:44.127104 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/cp-reloader/0.log" Dec 05 02:37:44 crc kubenswrapper[4665]: I1205 02:37:44.294394 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/cp-frr-files/0.log" Dec 05 02:37:44 crc kubenswrapper[4665]: I1205 02:37:44.319376 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/cp-reloader/0.log" Dec 05 02:37:44 crc kubenswrapper[4665]: I1205 02:37:44.334007 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/cp-metrics/0.log" Dec 05 02:37:44 crc kubenswrapper[4665]: I1205 02:37:44.385851 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/cp-metrics/0.log" Dec 05 02:37:44 crc kubenswrapper[4665]: I1205 02:37:44.588505 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/cp-reloader/0.log" Dec 05 02:37:44 crc kubenswrapper[4665]: I1205 02:37:44.657446 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/cp-frr-files/0.log" Dec 05 02:37:44 crc kubenswrapper[4665]: I1205 02:37:44.663778 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/cp-metrics/0.log" Dec 05 02:37:44 crc kubenswrapper[4665]: I1205 02:37:44.686056 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/controller/0.log" Dec 05 02:37:44 crc kubenswrapper[4665]: I1205 02:37:44.902364 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/frr-metrics/0.log" Dec 
05 02:37:44 crc kubenswrapper[4665]: I1205 02:37:44.922046 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 02:37:44 crc kubenswrapper[4665]: I1205 02:37:44.922096 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 02:37:44 crc kubenswrapper[4665]: I1205 02:37:44.977826 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/kube-rbac-proxy-frr/0.log" Dec 05 02:37:45 crc kubenswrapper[4665]: I1205 02:37:45.044381 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/kube-rbac-proxy/0.log" Dec 05 02:37:45 crc kubenswrapper[4665]: I1205 02:37:45.216059 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/reloader/0.log" Dec 05 02:37:45 crc kubenswrapper[4665]: I1205 02:37:45.280663 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-pchdx_f444b2fb-024e-4f65-84cc-4bc16a3cc6a9/frr-k8s-webhook-server/0.log" Dec 05 02:37:45 crc kubenswrapper[4665]: I1205 02:37:45.514377 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-84549bb967-wwjmg_c3dedfa3-52da-4bbc-b080-ce01610f9152/manager/0.log" Dec 05 02:37:45 crc kubenswrapper[4665]: I1205 02:37:45.879146 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-6868996d58-rgvnq_067086f8-f82e-45c4-a7dd-79cacf3192e5/webhook-server/0.log" Dec 05 02:37:45 crc kubenswrapper[4665]: I1205 02:37:45.901213 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-p5jk7_308f300a-520c-4c85-9cbd-dac3c432bdc1/frr/0.log" Dec 05 02:37:45 crc kubenswrapper[4665]: I1205 02:37:45.904033 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-wxh7x_44aa572b-ef2b-4b3a-83ef-9a45cfd73067/kube-rbac-proxy/0.log" Dec 05 02:37:46 crc kubenswrapper[4665]: I1205 02:37:46.311694 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-wxh7x_44aa572b-ef2b-4b3a-83ef-9a45cfd73067/speaker/0.log" Dec 05 02:37:58 crc kubenswrapper[4665]: I1205 02:37:58.845703 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn_3d279a3e-be32-4a4d-bf07-54cb8f3b2efa/util/0.log" Dec 05 02:37:58 crc kubenswrapper[4665]: I1205 02:37:58.984814 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn_3d279a3e-be32-4a4d-bf07-54cb8f3b2efa/util/0.log" Dec 05 02:37:59 crc kubenswrapper[4665]: I1205 02:37:59.008322 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn_3d279a3e-be32-4a4d-bf07-54cb8f3b2efa/pull/0.log" Dec 05 
02:37:59 crc kubenswrapper[4665]: I1205 02:37:59.132871 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn_3d279a3e-be32-4a4d-bf07-54cb8f3b2efa/pull/0.log" Dec 05 02:37:59 crc kubenswrapper[4665]: I1205 02:37:59.462713 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn_3d279a3e-be32-4a4d-bf07-54cb8f3b2efa/util/0.log" Dec 05 02:37:59 crc kubenswrapper[4665]: I1205 02:37:59.535612 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn_3d279a3e-be32-4a4d-bf07-54cb8f3b2efa/extract/0.log" Dec 05 02:37:59 crc kubenswrapper[4665]: I1205 02:37:59.614441 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fltkwn_3d279a3e-be32-4a4d-bf07-54cb8f3b2efa/pull/0.log" Dec 05 02:37:59 crc kubenswrapper[4665]: I1205 02:37:59.739578 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g_3dd46a4d-4cea-4ef4-8b4a-e78d451055a3/util/0.log" Dec 05 02:37:59 crc kubenswrapper[4665]: I1205 02:37:59.902207 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g_3dd46a4d-4cea-4ef4-8b4a-e78d451055a3/util/0.log" Dec 05 02:37:59 crc kubenswrapper[4665]: I1205 02:37:59.961376 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g_3dd46a4d-4cea-4ef4-8b4a-e78d451055a3/pull/0.log" Dec 05 02:37:59 crc kubenswrapper[4665]: I1205 02:37:59.983992 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g_3dd46a4d-4cea-4ef4-8b4a-e78d451055a3/pull/0.log" Dec 05 02:38:00 crc kubenswrapper[4665]: I1205 02:38:00.213657 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g_3dd46a4d-4cea-4ef4-8b4a-e78d451055a3/util/0.log" Dec 05 02:38:00 crc kubenswrapper[4665]: I1205 02:38:00.219843 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g_3dd46a4d-4cea-4ef4-8b4a-e78d451055a3/extract/0.log" Dec 05 02:38:00 crc kubenswrapper[4665]: I1205 02:38:00.296262 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83dqs9g_3dd46a4d-4cea-4ef4-8b4a-e78d451055a3/pull/0.log" Dec 05 02:38:00 crc kubenswrapper[4665]: I1205 02:38:00.480607 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pkwjd_0995d7c1-230b-46a5-9136-c644fa9faf86/extract-utilities/0.log" Dec 05 02:38:00 crc kubenswrapper[4665]: I1205 02:38:00.717568 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pkwjd_0995d7c1-230b-46a5-9136-c644fa9faf86/extract-content/0.log" Dec 05 02:38:00 crc kubenswrapper[4665]: I1205 02:38:00.771683 4665 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-pkwjd_0995d7c1-230b-46a5-9136-c644fa9faf86/extract-utilities/0.log" Dec 05 02:38:00 crc kubenswrapper[4665]: I1205 02:38:00.777087 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pkwjd_0995d7c1-230b-46a5-9136-c644fa9faf86/extract-content/0.log" Dec 05 02:38:01 crc kubenswrapper[4665]: I1205 02:38:01.067350 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pkwjd_0995d7c1-230b-46a5-9136-c644fa9faf86/extract-content/0.log" Dec 05 02:38:01 crc kubenswrapper[4665]: I1205 02:38:01.351840 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pkwjd_0995d7c1-230b-46a5-9136-c644fa9faf86/extract-utilities/0.log" Dec 05 02:38:01 crc kubenswrapper[4665]: I1205 02:38:01.517036 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9dz8h_9182b0cf-85af-4df8-81d6-5f1f407631ac/extract-utilities/0.log" Dec 05 02:38:01 crc kubenswrapper[4665]: I1205 02:38:01.771418 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9dz8h_9182b0cf-85af-4df8-81d6-5f1f407631ac/extract-content/0.log" Dec 05 02:38:01 crc kubenswrapper[4665]: I1205 02:38:01.874268 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9dz8h_9182b0cf-85af-4df8-81d6-5f1f407631ac/extract-content/0.log" Dec 05 02:38:01 crc kubenswrapper[4665]: I1205 02:38:01.880405 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9dz8h_9182b0cf-85af-4df8-81d6-5f1f407631ac/extract-utilities/0.log" Dec 05 02:38:02 crc kubenswrapper[4665]: I1205 02:38:02.079010 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-pkwjd_0995d7c1-230b-46a5-9136-c644fa9faf86/registry-server/0.log" Dec 05 02:38:02 crc kubenswrapper[4665]: I1205 02:38:02.203339 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9dz8h_9182b0cf-85af-4df8-81d6-5f1f407631ac/extract-content/0.log" Dec 05 02:38:02 crc kubenswrapper[4665]: I1205 02:38:02.211422 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9dz8h_9182b0cf-85af-4df8-81d6-5f1f407631ac/extract-utilities/0.log" Dec 05 02:38:02 crc kubenswrapper[4665]: I1205 02:38:02.460771 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-6lq6k_8b497962-196d-41aa-aacc-1d68536dfec6/marketplace-operator/0.log" Dec 05 02:38:02 crc kubenswrapper[4665]: I1205 02:38:02.787923 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ldv79_7f4973ad-12ad-421c-b68f-9b47206f2e2e/extract-utilities/0.log" Dec 05 02:38:02 crc kubenswrapper[4665]: I1205 02:38:02.927014 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-9dz8h_9182b0cf-85af-4df8-81d6-5f1f407631ac/registry-server/0.log" Dec 05 02:38:03 crc kubenswrapper[4665]: I1205 02:38:03.062860 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ldv79_7f4973ad-12ad-421c-b68f-9b47206f2e2e/extract-content/0.log" Dec 05 02:38:03 crc kubenswrapper[4665]: I1205 02:38:03.104916 4665 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-ldv79_7f4973ad-12ad-421c-b68f-9b47206f2e2e/extract-utilities/0.log" Dec 05 02:38:03 crc kubenswrapper[4665]: I1205 02:38:03.124862 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ldv79_7f4973ad-12ad-421c-b68f-9b47206f2e2e/extract-content/0.log" Dec 05 02:38:03 crc kubenswrapper[4665]: I1205 02:38:03.339338 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ldv79_7f4973ad-12ad-421c-b68f-9b47206f2e2e/extract-content/0.log" Dec 05 02:38:03 crc kubenswrapper[4665]: I1205 02:38:03.345953 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ldv79_7f4973ad-12ad-421c-b68f-9b47206f2e2e/extract-utilities/0.log" Dec 05 02:38:03 crc kubenswrapper[4665]: I1205 02:38:03.461322 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-ldv79_7f4973ad-12ad-421c-b68f-9b47206f2e2e/registry-server/0.log" Dec 05 02:38:03 crc kubenswrapper[4665]: I1205 02:38:03.567208 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tlrk9_3e6ec612-5d6b-4431-ba68-690a5c6c9c2a/extract-utilities/0.log" Dec 05 02:38:03 crc kubenswrapper[4665]: I1205 02:38:03.833672 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tlrk9_3e6ec612-5d6b-4431-ba68-690a5c6c9c2a/extract-content/0.log" Dec 05 02:38:03 crc kubenswrapper[4665]: I1205 02:38:03.856025 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tlrk9_3e6ec612-5d6b-4431-ba68-690a5c6c9c2a/extract-content/0.log" Dec 05 02:38:03 crc kubenswrapper[4665]: I1205 02:38:03.856373 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tlrk9_3e6ec612-5d6b-4431-ba68-690a5c6c9c2a/extract-utilities/0.log" Dec 05 02:38:04 crc kubenswrapper[4665]: I1205 02:38:04.011155 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tlrk9_3e6ec612-5d6b-4431-ba68-690a5c6c9c2a/extract-utilities/0.log" Dec 05 02:38:04 crc kubenswrapper[4665]: I1205 02:38:04.012587 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tlrk9_3e6ec612-5d6b-4431-ba68-690a5c6c9c2a/extract-content/0.log" Dec 05 02:38:04 crc kubenswrapper[4665]: I1205 02:38:04.632387 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-tlrk9_3e6ec612-5d6b-4431-ba68-690a5c6c9c2a/registry-server/0.log" Dec 05 02:38:14 crc kubenswrapper[4665]: I1205 02:38:14.922770 4665 patch_prober.go:28] interesting pod/machine-config-daemon-rgbtc container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 02:38:14 crc kubenswrapper[4665]: I1205 02:38:14.923381 4665 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 02:38:14 crc kubenswrapper[4665]: I1205 02:38:14.923430 4665 kubelet.go:2542] "SyncLoop (probe)" 
probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" Dec 05 02:38:14 crc kubenswrapper[4665]: I1205 02:38:14.924125 4665 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec"} pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 02:38:14 crc kubenswrapper[4665]: I1205 02:38:14.924173 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerName="machine-config-daemon" containerID="cri-o://53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" gracePeriod=600 Dec 05 02:38:15 crc kubenswrapper[4665]: E1205 02:38:15.047252 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:38:15 crc kubenswrapper[4665]: I1205 02:38:15.611761 4665 generic.go:334] "Generic (PLEG): container finished" podID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" exitCode=0 Dec 05 02:38:15 crc kubenswrapper[4665]: I1205 02:38:15.611803 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerDied","Data":"53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec"} Dec 05 02:38:15 crc kubenswrapper[4665]: I1205 02:38:15.611834 4665 scope.go:117] "RemoveContainer" containerID="dcc5733d6b54f78f22af95e1b175a0eefa4864e3dc5b4fc934fb623d121ab0f0" Dec 05 02:38:15 crc kubenswrapper[4665]: I1205 02:38:15.612479 4665 scope.go:117] "RemoveContainer" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" Dec 05 02:38:15 crc kubenswrapper[4665]: E1205 02:38:15.612734 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:38:28 crc kubenswrapper[4665]: I1205 02:38:28.893213 4665 scope.go:117] "RemoveContainer" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" Dec 05 02:38:28 crc kubenswrapper[4665]: E1205 02:38:28.893900 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:38:37 
crc kubenswrapper[4665]: E1205 02:38:37.993048 4665 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.236:50006->38.102.83.236:32943: write tcp 38.102.83.236:50006->38.102.83.236:32943: write: broken pipe Dec 05 02:38:40 crc kubenswrapper[4665]: I1205 02:38:40.893941 4665 scope.go:117] "RemoveContainer" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" Dec 05 02:38:40 crc kubenswrapper[4665]: E1205 02:38:40.894846 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:38:53 crc kubenswrapper[4665]: I1205 02:38:53.894027 4665 scope.go:117] "RemoveContainer" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" Dec 05 02:38:53 crc kubenswrapper[4665]: E1205 02:38:53.894957 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:39:04 crc kubenswrapper[4665]: I1205 02:39:04.899052 4665 scope.go:117] "RemoveContainer" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" Dec 05 02:39:04 crc kubenswrapper[4665]: E1205 02:39:04.899859 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:39:19 crc kubenswrapper[4665]: I1205 02:39:19.893126 4665 scope.go:117] "RemoveContainer" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" Dec 05 02:39:19 crc kubenswrapper[4665]: E1205 02:39:19.894319 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:39:33 crc kubenswrapper[4665]: I1205 02:39:33.894524 4665 scope.go:117] "RemoveContainer" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" Dec 05 02:39:33 crc kubenswrapper[4665]: E1205 02:39:33.895406 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:39:46 crc kubenswrapper[4665]: I1205 02:39:46.893861 4665 scope.go:117] "RemoveContainer" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" Dec 05 02:39:46 crc kubenswrapper[4665]: E1205 02:39:46.894496 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:40:00 crc kubenswrapper[4665]: I1205 02:40:00.897062 4665 scope.go:117] "RemoveContainer" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" Dec 05 02:40:00 crc kubenswrapper[4665]: E1205 02:40:00.897838 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:40:12 crc kubenswrapper[4665]: I1205 02:40:12.898480 4665 scope.go:117] "RemoveContainer" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" Dec 05 02:40:12 crc kubenswrapper[4665]: E1205 02:40:12.899138 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:40:21 crc kubenswrapper[4665]: I1205 02:40:21.812791 4665 generic.go:334] "Generic (PLEG): container finished" podID="067213a1-4227-401f-b02f-be7bb4adafd4" containerID="5f501d728b5de7d806a1e3722f6356a6b70e035cd98cc36b59fcbf7d3f93a544" exitCode=0 Dec 05 02:40:21 crc kubenswrapper[4665]: I1205 02:40:21.812918 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-gjkrl/must-gather-l4xct" event={"ID":"067213a1-4227-401f-b02f-be7bb4adafd4","Type":"ContainerDied","Data":"5f501d728b5de7d806a1e3722f6356a6b70e035cd98cc36b59fcbf7d3f93a544"} Dec 05 02:40:21 crc kubenswrapper[4665]: I1205 02:40:21.814251 4665 scope.go:117] "RemoveContainer" containerID="5f501d728b5de7d806a1e3722f6356a6b70e035cd98cc36b59fcbf7d3f93a544" Dec 05 02:40:22 crc kubenswrapper[4665]: I1205 02:40:22.832085 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-gjkrl_must-gather-l4xct_067213a1-4227-401f-b02f-be7bb4adafd4/gather/0.log" Dec 05 02:40:24 crc kubenswrapper[4665]: I1205 02:40:24.904560 4665 scope.go:117] "RemoveContainer" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" Dec 05 02:40:24 crc kubenswrapper[4665]: E1205 02:40:24.905367 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed 
container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:40:34 crc kubenswrapper[4665]: I1205 02:40:34.785511 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-gjkrl/must-gather-l4xct"] Dec 05 02:40:34 crc kubenswrapper[4665]: I1205 02:40:34.786195 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-gjkrl/must-gather-l4xct" podUID="067213a1-4227-401f-b02f-be7bb4adafd4" containerName="copy" containerID="cri-o://d3f5c0f9a81c21f76cc2f6b8bd23c78c4044669df8fb1f587d76190d1a474bb7" gracePeriod=2 Dec 05 02:40:34 crc kubenswrapper[4665]: I1205 02:40:34.796049 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-gjkrl/must-gather-l4xct"] Dec 05 02:40:34 crc kubenswrapper[4665]: I1205 02:40:34.940969 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-gjkrl_must-gather-l4xct_067213a1-4227-401f-b02f-be7bb4adafd4/copy/0.log" Dec 05 02:40:34 crc kubenswrapper[4665]: I1205 02:40:34.941612 4665 generic.go:334] "Generic (PLEG): container finished" podID="067213a1-4227-401f-b02f-be7bb4adafd4" containerID="d3f5c0f9a81c21f76cc2f6b8bd23c78c4044669df8fb1f587d76190d1a474bb7" exitCode=143 Dec 05 02:40:35 crc kubenswrapper[4665]: I1205 02:40:35.231413 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-gjkrl_must-gather-l4xct_067213a1-4227-401f-b02f-be7bb4adafd4/copy/0.log" Dec 05 02:40:35 crc kubenswrapper[4665]: I1205 02:40:35.231735 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-gjkrl/must-gather-l4xct" Dec 05 02:40:35 crc kubenswrapper[4665]: I1205 02:40:35.354274 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wc7ms\" (UniqueName: \"kubernetes.io/projected/067213a1-4227-401f-b02f-be7bb4adafd4-kube-api-access-wc7ms\") pod \"067213a1-4227-401f-b02f-be7bb4adafd4\" (UID: \"067213a1-4227-401f-b02f-be7bb4adafd4\") " Dec 05 02:40:35 crc kubenswrapper[4665]: I1205 02:40:35.354498 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/067213a1-4227-401f-b02f-be7bb4adafd4-must-gather-output\") pod \"067213a1-4227-401f-b02f-be7bb4adafd4\" (UID: \"067213a1-4227-401f-b02f-be7bb4adafd4\") " Dec 05 02:40:35 crc kubenswrapper[4665]: I1205 02:40:35.367364 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/067213a1-4227-401f-b02f-be7bb4adafd4-kube-api-access-wc7ms" (OuterVolumeSpecName: "kube-api-access-wc7ms") pod "067213a1-4227-401f-b02f-be7bb4adafd4" (UID: "067213a1-4227-401f-b02f-be7bb4adafd4"). InnerVolumeSpecName "kube-api-access-wc7ms". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:40:35 crc kubenswrapper[4665]: I1205 02:40:35.456542 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wc7ms\" (UniqueName: \"kubernetes.io/projected/067213a1-4227-401f-b02f-be7bb4adafd4-kube-api-access-wc7ms\") on node \"crc\" DevicePath \"\"" Dec 05 02:40:35 crc kubenswrapper[4665]: I1205 02:40:35.513967 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/067213a1-4227-401f-b02f-be7bb4adafd4-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "067213a1-4227-401f-b02f-be7bb4adafd4" (UID: "067213a1-4227-401f-b02f-be7bb4adafd4"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:40:35 crc kubenswrapper[4665]: I1205 02:40:35.558269 4665 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/067213a1-4227-401f-b02f-be7bb4adafd4-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 05 02:40:35 crc kubenswrapper[4665]: I1205 02:40:35.955152 4665 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-gjkrl_must-gather-l4xct_067213a1-4227-401f-b02f-be7bb4adafd4/copy/0.log" Dec 05 02:40:35 crc kubenswrapper[4665]: I1205 02:40:35.957254 4665 scope.go:117] "RemoveContainer" containerID="d3f5c0f9a81c21f76cc2f6b8bd23c78c4044669df8fb1f587d76190d1a474bb7" Dec 05 02:40:35 crc kubenswrapper[4665]: I1205 02:40:35.957319 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-gjkrl/must-gather-l4xct" Dec 05 02:40:35 crc kubenswrapper[4665]: I1205 02:40:35.982103 4665 scope.go:117] "RemoveContainer" containerID="5f501d728b5de7d806a1e3722f6356a6b70e035cd98cc36b59fcbf7d3f93a544" Dec 05 02:40:36 crc kubenswrapper[4665]: I1205 02:40:36.905611 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="067213a1-4227-401f-b02f-be7bb4adafd4" path="/var/lib/kubelet/pods/067213a1-4227-401f-b02f-be7bb4adafd4/volumes" Dec 05 02:40:38 crc kubenswrapper[4665]: I1205 02:40:38.894179 4665 scope.go:117] "RemoveContainer" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" Dec 05 02:40:38 crc kubenswrapper[4665]: E1205 02:40:38.894780 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:40:51 crc kubenswrapper[4665]: I1205 02:40:51.893492 4665 scope.go:117] "RemoveContainer" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" Dec 05 02:40:51 crc kubenswrapper[4665]: E1205 02:40:51.894267 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:41:02 crc kubenswrapper[4665]: I1205 02:41:02.702756 4665 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/redhat-operators-67wlk"] Dec 05 02:41:02 crc kubenswrapper[4665]: E1205 02:41:02.703981 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="067213a1-4227-401f-b02f-be7bb4adafd4" containerName="gather" Dec 05 02:41:02 crc kubenswrapper[4665]: I1205 02:41:02.703998 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="067213a1-4227-401f-b02f-be7bb4adafd4" containerName="gather" Dec 05 02:41:02 crc kubenswrapper[4665]: E1205 02:41:02.704023 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="067213a1-4227-401f-b02f-be7bb4adafd4" containerName="copy" Dec 05 02:41:02 crc kubenswrapper[4665]: I1205 02:41:02.704031 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="067213a1-4227-401f-b02f-be7bb4adafd4" containerName="copy" Dec 05 02:41:02 crc kubenswrapper[4665]: E1205 02:41:02.704049 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c68a8f8c-3cbb-44e8-9d8e-f40a661068d2" containerName="container-00" Dec 05 02:41:02 crc kubenswrapper[4665]: I1205 02:41:02.704058 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="c68a8f8c-3cbb-44e8-9d8e-f40a661068d2" containerName="container-00" Dec 05 02:41:02 crc kubenswrapper[4665]: I1205 02:41:02.704307 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="067213a1-4227-401f-b02f-be7bb4adafd4" containerName="gather" Dec 05 02:41:02 crc kubenswrapper[4665]: I1205 02:41:02.704324 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="c68a8f8c-3cbb-44e8-9d8e-f40a661068d2" containerName="container-00" Dec 05 02:41:02 crc kubenswrapper[4665]: I1205 02:41:02.704342 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="067213a1-4227-401f-b02f-be7bb4adafd4" containerName="copy" Dec 05 02:41:02 crc kubenswrapper[4665]: I1205 02:41:02.705954 4665 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-67wlk" Dec 05 02:41:02 crc kubenswrapper[4665]: I1205 02:41:02.717957 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-67wlk"] Dec 05 02:41:02 crc kubenswrapper[4665]: I1205 02:41:02.781775 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b82l8\" (UniqueName: \"kubernetes.io/projected/c418c5a6-2d19-42d8-8ba7-980cedcaa8fd-kube-api-access-b82l8\") pod \"redhat-operators-67wlk\" (UID: \"c418c5a6-2d19-42d8-8ba7-980cedcaa8fd\") " pod="openshift-marketplace/redhat-operators-67wlk" Dec 05 02:41:02 crc kubenswrapper[4665]: I1205 02:41:02.781836 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c418c5a6-2d19-42d8-8ba7-980cedcaa8fd-catalog-content\") pod \"redhat-operators-67wlk\" (UID: \"c418c5a6-2d19-42d8-8ba7-980cedcaa8fd\") " pod="openshift-marketplace/redhat-operators-67wlk" Dec 05 02:41:02 crc kubenswrapper[4665]: I1205 02:41:02.781890 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c418c5a6-2d19-42d8-8ba7-980cedcaa8fd-utilities\") pod \"redhat-operators-67wlk\" (UID: \"c418c5a6-2d19-42d8-8ba7-980cedcaa8fd\") " pod="openshift-marketplace/redhat-operators-67wlk" Dec 05 02:41:02 crc kubenswrapper[4665]: I1205 02:41:02.884236 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b82l8\" (UniqueName: \"kubernetes.io/projected/c418c5a6-2d19-42d8-8ba7-980cedcaa8fd-kube-api-access-b82l8\") pod \"redhat-operators-67wlk\" (UID: \"c418c5a6-2d19-42d8-8ba7-980cedcaa8fd\") " pod="openshift-marketplace/redhat-operators-67wlk" Dec 05 02:41:02 crc kubenswrapper[4665]: I1205 02:41:02.884563 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c418c5a6-2d19-42d8-8ba7-980cedcaa8fd-catalog-content\") pod \"redhat-operators-67wlk\" (UID: \"c418c5a6-2d19-42d8-8ba7-980cedcaa8fd\") " pod="openshift-marketplace/redhat-operators-67wlk" Dec 05 02:41:02 crc kubenswrapper[4665]: I1205 02:41:02.884617 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c418c5a6-2d19-42d8-8ba7-980cedcaa8fd-utilities\") pod \"redhat-operators-67wlk\" (UID: \"c418c5a6-2d19-42d8-8ba7-980cedcaa8fd\") " pod="openshift-marketplace/redhat-operators-67wlk" Dec 05 02:41:02 crc kubenswrapper[4665]: I1205 02:41:02.885023 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c418c5a6-2d19-42d8-8ba7-980cedcaa8fd-catalog-content\") pod \"redhat-operators-67wlk\" (UID: \"c418c5a6-2d19-42d8-8ba7-980cedcaa8fd\") " pod="openshift-marketplace/redhat-operators-67wlk" Dec 05 02:41:02 crc kubenswrapper[4665]: I1205 02:41:02.885071 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c418c5a6-2d19-42d8-8ba7-980cedcaa8fd-utilities\") pod \"redhat-operators-67wlk\" (UID: \"c418c5a6-2d19-42d8-8ba7-980cedcaa8fd\") " pod="openshift-marketplace/redhat-operators-67wlk" Dec 05 02:41:02 crc kubenswrapper[4665]: I1205 02:41:02.920204 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-b82l8\" (UniqueName: \"kubernetes.io/projected/c418c5a6-2d19-42d8-8ba7-980cedcaa8fd-kube-api-access-b82l8\") pod \"redhat-operators-67wlk\" (UID: \"c418c5a6-2d19-42d8-8ba7-980cedcaa8fd\") " pod="openshift-marketplace/redhat-operators-67wlk" Dec 05 02:41:03 crc kubenswrapper[4665]: I1205 02:41:03.058109 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-67wlk" Dec 05 02:41:03 crc kubenswrapper[4665]: I1205 02:41:03.347983 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-67wlk"] Dec 05 02:41:03 crc kubenswrapper[4665]: I1205 02:41:03.893926 4665 scope.go:117] "RemoveContainer" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" Dec 05 02:41:03 crc kubenswrapper[4665]: E1205 02:41:03.894523 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:41:04 crc kubenswrapper[4665]: I1205 02:41:04.213808 4665 generic.go:334] "Generic (PLEG): container finished" podID="c418c5a6-2d19-42d8-8ba7-980cedcaa8fd" containerID="214c29550387d50c5c338a321eedc031793dd679e8fb5e2a25361cc0806d6834" exitCode=0 Dec 05 02:41:04 crc kubenswrapper[4665]: I1205 02:41:04.213851 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-67wlk" event={"ID":"c418c5a6-2d19-42d8-8ba7-980cedcaa8fd","Type":"ContainerDied","Data":"214c29550387d50c5c338a321eedc031793dd679e8fb5e2a25361cc0806d6834"} Dec 05 02:41:04 crc kubenswrapper[4665]: I1205 02:41:04.213877 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-67wlk" event={"ID":"c418c5a6-2d19-42d8-8ba7-980cedcaa8fd","Type":"ContainerStarted","Data":"e4410cff65942006429a5f167366cc560cd149d62ecc59ed3a25180b8cd799b8"} Dec 05 02:41:04 crc kubenswrapper[4665]: I1205 02:41:04.216548 4665 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 02:41:05 crc kubenswrapper[4665]: I1205 02:41:05.225942 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-67wlk" event={"ID":"c418c5a6-2d19-42d8-8ba7-980cedcaa8fd","Type":"ContainerStarted","Data":"770c00d3dae9261b0d8f695eff31a89332e03537d1e1a771c7d238ce234406c5"} Dec 05 02:41:08 crc kubenswrapper[4665]: I1205 02:41:08.258393 4665 generic.go:334] "Generic (PLEG): container finished" podID="c418c5a6-2d19-42d8-8ba7-980cedcaa8fd" containerID="770c00d3dae9261b0d8f695eff31a89332e03537d1e1a771c7d238ce234406c5" exitCode=0 Dec 05 02:41:08 crc kubenswrapper[4665]: I1205 02:41:08.258519 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-67wlk" event={"ID":"c418c5a6-2d19-42d8-8ba7-980cedcaa8fd","Type":"ContainerDied","Data":"770c00d3dae9261b0d8f695eff31a89332e03537d1e1a771c7d238ce234406c5"} Dec 05 02:41:09 crc kubenswrapper[4665]: I1205 02:41:09.271533 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-67wlk" 
event={"ID":"c418c5a6-2d19-42d8-8ba7-980cedcaa8fd","Type":"ContainerStarted","Data":"2c8571cc17a26192cc88b7947646e872202665720445bdc3a4a2cd1734a87140"} Dec 05 02:41:09 crc kubenswrapper[4665]: I1205 02:41:09.292613 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-67wlk" podStartSLOduration=2.840940782 podStartE2EDuration="7.292594876s" podCreationTimestamp="2025-12-05 02:41:02 +0000 UTC" firstStartedPulling="2025-12-05 02:41:04.216169427 +0000 UTC m=+5439.555561736" lastFinishedPulling="2025-12-05 02:41:08.667823521 +0000 UTC m=+5444.007215830" observedRunningTime="2025-12-05 02:41:09.287619475 +0000 UTC m=+5444.627011774" watchObservedRunningTime="2025-12-05 02:41:09.292594876 +0000 UTC m=+5444.631987175" Dec 05 02:41:13 crc kubenswrapper[4665]: I1205 02:41:13.058879 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-67wlk" Dec 05 02:41:13 crc kubenswrapper[4665]: I1205 02:41:13.059619 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-67wlk" Dec 05 02:41:14 crc kubenswrapper[4665]: I1205 02:41:14.123720 4665 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-67wlk" podUID="c418c5a6-2d19-42d8-8ba7-980cedcaa8fd" containerName="registry-server" probeResult="failure" output=< Dec 05 02:41:14 crc kubenswrapper[4665]: timeout: failed to connect service ":50051" within 1s Dec 05 02:41:14 crc kubenswrapper[4665]: > Dec 05 02:41:18 crc kubenswrapper[4665]: I1205 02:41:18.893374 4665 scope.go:117] "RemoveContainer" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" Dec 05 02:41:18 crc kubenswrapper[4665]: E1205 02:41:18.894040 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:41:23 crc kubenswrapper[4665]: I1205 02:41:23.106006 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-67wlk" Dec 05 02:41:23 crc kubenswrapper[4665]: I1205 02:41:23.154552 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-67wlk" Dec 05 02:41:23 crc kubenswrapper[4665]: I1205 02:41:23.345970 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-67wlk"] Dec 05 02:41:24 crc kubenswrapper[4665]: I1205 02:41:24.405041 4665 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-67wlk" podUID="c418c5a6-2d19-42d8-8ba7-980cedcaa8fd" containerName="registry-server" containerID="cri-o://2c8571cc17a26192cc88b7947646e872202665720445bdc3a4a2cd1734a87140" gracePeriod=2 Dec 05 02:41:24 crc kubenswrapper[4665]: I1205 02:41:24.904808 4665 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-67wlk" Dec 05 02:41:25 crc kubenswrapper[4665]: I1205 02:41:25.036024 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c418c5a6-2d19-42d8-8ba7-980cedcaa8fd-utilities\") pod \"c418c5a6-2d19-42d8-8ba7-980cedcaa8fd\" (UID: \"c418c5a6-2d19-42d8-8ba7-980cedcaa8fd\") " Dec 05 02:41:25 crc kubenswrapper[4665]: I1205 02:41:25.036109 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c418c5a6-2d19-42d8-8ba7-980cedcaa8fd-catalog-content\") pod \"c418c5a6-2d19-42d8-8ba7-980cedcaa8fd\" (UID: \"c418c5a6-2d19-42d8-8ba7-980cedcaa8fd\") " Dec 05 02:41:25 crc kubenswrapper[4665]: I1205 02:41:25.036213 4665 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b82l8\" (UniqueName: \"kubernetes.io/projected/c418c5a6-2d19-42d8-8ba7-980cedcaa8fd-kube-api-access-b82l8\") pod \"c418c5a6-2d19-42d8-8ba7-980cedcaa8fd\" (UID: \"c418c5a6-2d19-42d8-8ba7-980cedcaa8fd\") " Dec 05 02:41:25 crc kubenswrapper[4665]: I1205 02:41:25.037132 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c418c5a6-2d19-42d8-8ba7-980cedcaa8fd-utilities" (OuterVolumeSpecName: "utilities") pod "c418c5a6-2d19-42d8-8ba7-980cedcaa8fd" (UID: "c418c5a6-2d19-42d8-8ba7-980cedcaa8fd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:41:25 crc kubenswrapper[4665]: I1205 02:41:25.042176 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c418c5a6-2d19-42d8-8ba7-980cedcaa8fd-kube-api-access-b82l8" (OuterVolumeSpecName: "kube-api-access-b82l8") pod "c418c5a6-2d19-42d8-8ba7-980cedcaa8fd" (UID: "c418c5a6-2d19-42d8-8ba7-980cedcaa8fd"). InnerVolumeSpecName "kube-api-access-b82l8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 02:41:25 crc kubenswrapper[4665]: I1205 02:41:25.138714 4665 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c418c5a6-2d19-42d8-8ba7-980cedcaa8fd-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 02:41:25 crc kubenswrapper[4665]: I1205 02:41:25.138743 4665 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b82l8\" (UniqueName: \"kubernetes.io/projected/c418c5a6-2d19-42d8-8ba7-980cedcaa8fd-kube-api-access-b82l8\") on node \"crc\" DevicePath \"\"" Dec 05 02:41:25 crc kubenswrapper[4665]: I1205 02:41:25.144651 4665 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c418c5a6-2d19-42d8-8ba7-980cedcaa8fd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c418c5a6-2d19-42d8-8ba7-980cedcaa8fd" (UID: "c418c5a6-2d19-42d8-8ba7-980cedcaa8fd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 02:41:25 crc kubenswrapper[4665]: I1205 02:41:25.240570 4665 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c418c5a6-2d19-42d8-8ba7-980cedcaa8fd-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 02:41:25 crc kubenswrapper[4665]: I1205 02:41:25.418864 4665 generic.go:334] "Generic (PLEG): container finished" podID="c418c5a6-2d19-42d8-8ba7-980cedcaa8fd" containerID="2c8571cc17a26192cc88b7947646e872202665720445bdc3a4a2cd1734a87140" exitCode=0 Dec 05 02:41:25 crc kubenswrapper[4665]: I1205 02:41:25.418926 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-67wlk" event={"ID":"c418c5a6-2d19-42d8-8ba7-980cedcaa8fd","Type":"ContainerDied","Data":"2c8571cc17a26192cc88b7947646e872202665720445bdc3a4a2cd1734a87140"} Dec 05 02:41:25 crc kubenswrapper[4665]: I1205 02:41:25.419000 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-67wlk" event={"ID":"c418c5a6-2d19-42d8-8ba7-980cedcaa8fd","Type":"ContainerDied","Data":"e4410cff65942006429a5f167366cc560cd149d62ecc59ed3a25180b8cd799b8"} Dec 05 02:41:25 crc kubenswrapper[4665]: I1205 02:41:25.419033 4665 scope.go:117] "RemoveContainer" containerID="2c8571cc17a26192cc88b7947646e872202665720445bdc3a4a2cd1734a87140" Dec 05 02:41:25 crc kubenswrapper[4665]: I1205 02:41:25.418942 4665 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-67wlk" Dec 05 02:41:25 crc kubenswrapper[4665]: I1205 02:41:25.464170 4665 scope.go:117] "RemoveContainer" containerID="770c00d3dae9261b0d8f695eff31a89332e03537d1e1a771c7d238ce234406c5" Dec 05 02:41:25 crc kubenswrapper[4665]: I1205 02:41:25.473131 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-67wlk"] Dec 05 02:41:25 crc kubenswrapper[4665]: I1205 02:41:25.485907 4665 scope.go:117] "RemoveContainer" containerID="214c29550387d50c5c338a321eedc031793dd679e8fb5e2a25361cc0806d6834" Dec 05 02:41:25 crc kubenswrapper[4665]: I1205 02:41:25.486588 4665 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-67wlk"] Dec 05 02:41:25 crc kubenswrapper[4665]: I1205 02:41:25.525709 4665 scope.go:117] "RemoveContainer" containerID="2c8571cc17a26192cc88b7947646e872202665720445bdc3a4a2cd1734a87140" Dec 05 02:41:25 crc kubenswrapper[4665]: E1205 02:41:25.526378 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2c8571cc17a26192cc88b7947646e872202665720445bdc3a4a2cd1734a87140\": container with ID starting with 2c8571cc17a26192cc88b7947646e872202665720445bdc3a4a2cd1734a87140 not found: ID does not exist" containerID="2c8571cc17a26192cc88b7947646e872202665720445bdc3a4a2cd1734a87140" Dec 05 02:41:25 crc kubenswrapper[4665]: I1205 02:41:25.526423 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2c8571cc17a26192cc88b7947646e872202665720445bdc3a4a2cd1734a87140"} err="failed to get container status \"2c8571cc17a26192cc88b7947646e872202665720445bdc3a4a2cd1734a87140\": rpc error: code = NotFound desc = could not find container \"2c8571cc17a26192cc88b7947646e872202665720445bdc3a4a2cd1734a87140\": container with ID starting with 2c8571cc17a26192cc88b7947646e872202665720445bdc3a4a2cd1734a87140 not found: ID does not exist" Dec 05 02:41:25 crc 
kubenswrapper[4665]: I1205 02:41:25.526449 4665 scope.go:117] "RemoveContainer" containerID="770c00d3dae9261b0d8f695eff31a89332e03537d1e1a771c7d238ce234406c5" Dec 05 02:41:25 crc kubenswrapper[4665]: E1205 02:41:25.526725 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"770c00d3dae9261b0d8f695eff31a89332e03537d1e1a771c7d238ce234406c5\": container with ID starting with 770c00d3dae9261b0d8f695eff31a89332e03537d1e1a771c7d238ce234406c5 not found: ID does not exist" containerID="770c00d3dae9261b0d8f695eff31a89332e03537d1e1a771c7d238ce234406c5" Dec 05 02:41:25 crc kubenswrapper[4665]: I1205 02:41:25.526753 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"770c00d3dae9261b0d8f695eff31a89332e03537d1e1a771c7d238ce234406c5"} err="failed to get container status \"770c00d3dae9261b0d8f695eff31a89332e03537d1e1a771c7d238ce234406c5\": rpc error: code = NotFound desc = could not find container \"770c00d3dae9261b0d8f695eff31a89332e03537d1e1a771c7d238ce234406c5\": container with ID starting with 770c00d3dae9261b0d8f695eff31a89332e03537d1e1a771c7d238ce234406c5 not found: ID does not exist" Dec 05 02:41:25 crc kubenswrapper[4665]: I1205 02:41:25.526797 4665 scope.go:117] "RemoveContainer" containerID="214c29550387d50c5c338a321eedc031793dd679e8fb5e2a25361cc0806d6834" Dec 05 02:41:25 crc kubenswrapper[4665]: E1205 02:41:25.527149 4665 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"214c29550387d50c5c338a321eedc031793dd679e8fb5e2a25361cc0806d6834\": container with ID starting with 214c29550387d50c5c338a321eedc031793dd679e8fb5e2a25361cc0806d6834 not found: ID does not exist" containerID="214c29550387d50c5c338a321eedc031793dd679e8fb5e2a25361cc0806d6834" Dec 05 02:41:25 crc kubenswrapper[4665]: I1205 02:41:25.527171 4665 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"214c29550387d50c5c338a321eedc031793dd679e8fb5e2a25361cc0806d6834"} err="failed to get container status \"214c29550387d50c5c338a321eedc031793dd679e8fb5e2a25361cc0806d6834\": rpc error: code = NotFound desc = could not find container \"214c29550387d50c5c338a321eedc031793dd679e8fb5e2a25361cc0806d6834\": container with ID starting with 214c29550387d50c5c338a321eedc031793dd679e8fb5e2a25361cc0806d6834 not found: ID does not exist" Dec 05 02:41:26 crc kubenswrapper[4665]: I1205 02:41:26.903415 4665 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c418c5a6-2d19-42d8-8ba7-980cedcaa8fd" path="/var/lib/kubelet/pods/c418c5a6-2d19-42d8-8ba7-980cedcaa8fd/volumes" Dec 05 02:41:30 crc kubenswrapper[4665]: I1205 02:41:30.893646 4665 scope.go:117] "RemoveContainer" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" Dec 05 02:41:30 crc kubenswrapper[4665]: E1205 02:41:30.894516 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:41:40 crc kubenswrapper[4665]: I1205 02:41:40.243606 4665 scope.go:117] "RemoveContainer" containerID="1201ffe50d7b5cfaa5a4c3d97b44ac33e5451e5091546ecb4868181155a2dbca" 
Dec 05 02:41:42 crc kubenswrapper[4665]: I1205 02:41:42.894003 4665 scope.go:117] "RemoveContainer" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" Dec 05 02:41:42 crc kubenswrapper[4665]: E1205 02:41:42.894943 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:41:57 crc kubenswrapper[4665]: I1205 02:41:57.901258 4665 scope.go:117] "RemoveContainer" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" Dec 05 02:41:57 crc kubenswrapper[4665]: E1205 02:41:57.902130 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:42:10 crc kubenswrapper[4665]: I1205 02:42:10.894721 4665 scope.go:117] "RemoveContainer" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" Dec 05 02:42:10 crc kubenswrapper[4665]: E1205 02:42:10.895843 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:42:22 crc kubenswrapper[4665]: I1205 02:42:22.893147 4665 scope.go:117] "RemoveContainer" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" Dec 05 02:42:22 crc kubenswrapper[4665]: E1205 02:42:22.893920 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:42:34 crc kubenswrapper[4665]: I1205 02:42:34.901138 4665 scope.go:117] "RemoveContainer" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" Dec 05 02:42:34 crc kubenswrapper[4665]: E1205 02:42:34.901954 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:42:45 crc kubenswrapper[4665]: I1205 02:42:45.894219 4665 scope.go:117] "RemoveContainer" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" Dec 05 02:42:45 
crc kubenswrapper[4665]: E1205 02:42:45.895028 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:43:00 crc kubenswrapper[4665]: I1205 02:43:00.894365 4665 scope.go:117] "RemoveContainer" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" Dec 05 02:43:00 crc kubenswrapper[4665]: E1205 02:43:00.895148 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:43:11 crc kubenswrapper[4665]: I1205 02:43:11.893788 4665 scope.go:117] "RemoveContainer" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" Dec 05 02:43:11 crc kubenswrapper[4665]: E1205 02:43:11.894724 4665 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-rgbtc_openshift-machine-config-operator(dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14)\"" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" podUID="dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14" Dec 05 02:43:22 crc kubenswrapper[4665]: I1205 02:43:22.893456 4665 scope.go:117] "RemoveContainer" containerID="53b6fd71ab21e93cb43b3ff94e418f9a823245df654f15aa8d5880f16e96cfec" Dec 05 02:43:23 crc kubenswrapper[4665]: I1205 02:43:23.517462 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-rgbtc" event={"ID":"dbc5a2ac-b7ae-4f75-bdec-f4f0f4948a14","Type":"ContainerStarted","Data":"f0e6dd524317cdde4a7746c7bb943f0ac8c957c9d935da15a77eb2c285b10b2f"} Dec 05 02:44:18 crc kubenswrapper[4665]: I1205 02:44:18.042946 4665 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-24c4n"] Dec 05 02:44:18 crc kubenswrapper[4665]: E1205 02:44:18.044034 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c418c5a6-2d19-42d8-8ba7-980cedcaa8fd" containerName="extract-content" Dec 05 02:44:18 crc kubenswrapper[4665]: I1205 02:44:18.044051 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="c418c5a6-2d19-42d8-8ba7-980cedcaa8fd" containerName="extract-content" Dec 05 02:44:18 crc kubenswrapper[4665]: E1205 02:44:18.044074 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c418c5a6-2d19-42d8-8ba7-980cedcaa8fd" containerName="extract-utilities" Dec 05 02:44:18 crc kubenswrapper[4665]: I1205 02:44:18.044083 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="c418c5a6-2d19-42d8-8ba7-980cedcaa8fd" containerName="extract-utilities" Dec 05 02:44:18 crc kubenswrapper[4665]: E1205 02:44:18.044096 4665 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c418c5a6-2d19-42d8-8ba7-980cedcaa8fd" containerName="registry-server" Dec 05 02:44:18 crc kubenswrapper[4665]: 
I1205 02:44:18.044103 4665 state_mem.go:107] "Deleted CPUSet assignment" podUID="c418c5a6-2d19-42d8-8ba7-980cedcaa8fd" containerName="registry-server" Dec 05 02:44:18 crc kubenswrapper[4665]: I1205 02:44:18.044380 4665 memory_manager.go:354] "RemoveStaleState removing state" podUID="c418c5a6-2d19-42d8-8ba7-980cedcaa8fd" containerName="registry-server" Dec 05 02:44:18 crc kubenswrapper[4665]: I1205 02:44:18.055571 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-24c4n"] Dec 05 02:44:18 crc kubenswrapper[4665]: I1205 02:44:18.055681 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-24c4n" Dec 05 02:44:18 crc kubenswrapper[4665]: I1205 02:44:18.208381 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zp2td\" (UniqueName: \"kubernetes.io/projected/b1851dcf-8208-4b33-beda-ad403bd515dc-kube-api-access-zp2td\") pod \"community-operators-24c4n\" (UID: \"b1851dcf-8208-4b33-beda-ad403bd515dc\") " pod="openshift-marketplace/community-operators-24c4n" Dec 05 02:44:18 crc kubenswrapper[4665]: I1205 02:44:18.208544 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1851dcf-8208-4b33-beda-ad403bd515dc-catalog-content\") pod \"community-operators-24c4n\" (UID: \"b1851dcf-8208-4b33-beda-ad403bd515dc\") " pod="openshift-marketplace/community-operators-24c4n" Dec 05 02:44:18 crc kubenswrapper[4665]: I1205 02:44:18.208604 4665 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1851dcf-8208-4b33-beda-ad403bd515dc-utilities\") pod \"community-operators-24c4n\" (UID: \"b1851dcf-8208-4b33-beda-ad403bd515dc\") " pod="openshift-marketplace/community-operators-24c4n" Dec 05 02:44:18 crc kubenswrapper[4665]: I1205 02:44:18.310758 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zp2td\" (UniqueName: \"kubernetes.io/projected/b1851dcf-8208-4b33-beda-ad403bd515dc-kube-api-access-zp2td\") pod \"community-operators-24c4n\" (UID: \"b1851dcf-8208-4b33-beda-ad403bd515dc\") " pod="openshift-marketplace/community-operators-24c4n" Dec 05 02:44:18 crc kubenswrapper[4665]: I1205 02:44:18.311071 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1851dcf-8208-4b33-beda-ad403bd515dc-catalog-content\") pod \"community-operators-24c4n\" (UID: \"b1851dcf-8208-4b33-beda-ad403bd515dc\") " pod="openshift-marketplace/community-operators-24c4n" Dec 05 02:44:18 crc kubenswrapper[4665]: I1205 02:44:18.311117 4665 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1851dcf-8208-4b33-beda-ad403bd515dc-utilities\") pod \"community-operators-24c4n\" (UID: \"b1851dcf-8208-4b33-beda-ad403bd515dc\") " pod="openshift-marketplace/community-operators-24c4n" Dec 05 02:44:18 crc kubenswrapper[4665]: I1205 02:44:18.311524 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b1851dcf-8208-4b33-beda-ad403bd515dc-catalog-content\") pod \"community-operators-24c4n\" (UID: \"b1851dcf-8208-4b33-beda-ad403bd515dc\") " pod="openshift-marketplace/community-operators-24c4n" Dec 05 
02:44:18 crc kubenswrapper[4665]: I1205 02:44:18.311582 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b1851dcf-8208-4b33-beda-ad403bd515dc-utilities\") pod \"community-operators-24c4n\" (UID: \"b1851dcf-8208-4b33-beda-ad403bd515dc\") " pod="openshift-marketplace/community-operators-24c4n" Dec 05 02:44:18 crc kubenswrapper[4665]: I1205 02:44:18.330190 4665 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zp2td\" (UniqueName: \"kubernetes.io/projected/b1851dcf-8208-4b33-beda-ad403bd515dc-kube-api-access-zp2td\") pod \"community-operators-24c4n\" (UID: \"b1851dcf-8208-4b33-beda-ad403bd515dc\") " pod="openshift-marketplace/community-operators-24c4n" Dec 05 02:44:18 crc kubenswrapper[4665]: I1205 02:44:18.417806 4665 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-24c4n" Dec 05 02:44:18 crc kubenswrapper[4665]: I1205 02:44:18.942210 4665 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-24c4n"] Dec 05 02:44:19 crc kubenswrapper[4665]: I1205 02:44:19.736682 4665 generic.go:334] "Generic (PLEG): container finished" podID="b1851dcf-8208-4b33-beda-ad403bd515dc" containerID="9a473908b6c81561442d90f6d80d4b9845860cd100aff8622446d8407959cb8e" exitCode=0 Dec 05 02:44:19 crc kubenswrapper[4665]: I1205 02:44:19.736783 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-24c4n" event={"ID":"b1851dcf-8208-4b33-beda-ad403bd515dc","Type":"ContainerDied","Data":"9a473908b6c81561442d90f6d80d4b9845860cd100aff8622446d8407959cb8e"} Dec 05 02:44:19 crc kubenswrapper[4665]: I1205 02:44:19.736954 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-24c4n" event={"ID":"b1851dcf-8208-4b33-beda-ad403bd515dc","Type":"ContainerStarted","Data":"4a8155672d512faa7f1ef1105f9e9a31ea9a03a1cf1638a85c9ff15d0616079e"} Dec 05 02:44:20 crc kubenswrapper[4665]: I1205 02:44:20.747440 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-24c4n" event={"ID":"b1851dcf-8208-4b33-beda-ad403bd515dc","Type":"ContainerStarted","Data":"d5909a142a7bd4b410f676d9d4a1bf12eb00c3c999a290c5633f120143a47b0e"} Dec 05 02:44:21 crc kubenswrapper[4665]: I1205 02:44:21.759144 4665 generic.go:334] "Generic (PLEG): container finished" podID="b1851dcf-8208-4b33-beda-ad403bd515dc" containerID="d5909a142a7bd4b410f676d9d4a1bf12eb00c3c999a290c5633f120143a47b0e" exitCode=0 Dec 05 02:44:21 crc kubenswrapper[4665]: I1205 02:44:21.759232 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-24c4n" event={"ID":"b1851dcf-8208-4b33-beda-ad403bd515dc","Type":"ContainerDied","Data":"d5909a142a7bd4b410f676d9d4a1bf12eb00c3c999a290c5633f120143a47b0e"} Dec 05 02:44:22 crc kubenswrapper[4665]: I1205 02:44:22.773258 4665 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-24c4n" event={"ID":"b1851dcf-8208-4b33-beda-ad403bd515dc","Type":"ContainerStarted","Data":"7350390cc18fc754ce2af66b23aeddb0ce5432e79ace9a05ae9eb95d5af3e774"} Dec 05 02:44:22 crc kubenswrapper[4665]: I1205 02:44:22.799415 4665 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-24c4n" podStartSLOduration=2.356925723 podStartE2EDuration="4.79926064s" podCreationTimestamp="2025-12-05 
02:44:18 +0000 UTC" firstStartedPulling="2025-12-05 02:44:19.740499528 +0000 UTC m=+5635.079891827" lastFinishedPulling="2025-12-05 02:44:22.182834445 +0000 UTC m=+5637.522226744" observedRunningTime="2025-12-05 02:44:22.796536214 +0000 UTC m=+5638.135928533" watchObservedRunningTime="2025-12-05 02:44:22.79926064 +0000 UTC m=+5638.138652949" Dec 05 02:44:28 crc kubenswrapper[4665]: I1205 02:44:28.419732 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-24c4n" Dec 05 02:44:28 crc kubenswrapper[4665]: I1205 02:44:28.420333 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-24c4n" Dec 05 02:44:28 crc kubenswrapper[4665]: I1205 02:44:28.475819 4665 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-24c4n" Dec 05 02:44:28 crc kubenswrapper[4665]: I1205 02:44:28.907285 4665 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-24c4n" Dec 05 02:44:28 crc kubenswrapper[4665]: I1205 02:44:28.955887 4665 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-24c4n"] var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515114443424024447 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015114443425017365 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015114430003016474 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015114430003015444 5ustar corecore